diff --git a/cmd/benchcmp/benchcmp.go b/cmd/benchcmp/benchcmp.go index ed53d717c9f..d078d3d4d9c 100644 --- a/cmd/benchcmp/benchcmp.go +++ b/cmd/benchcmp/benchcmp.go @@ -133,7 +133,7 @@ func main() { } } -func fatal(msg interface{}) { +func fatal(msg any) { fmt.Fprintln(os.Stderr, msg) os.Exit(1) } diff --git a/cmd/bundle/main_test.go b/cmd/bundle/main_test.go index 0dee2afb0b2..42dac86a2b8 100644 --- a/cmd/bundle/main_test.go +++ b/cmd/bundle/main_test.go @@ -27,7 +27,7 @@ func testBundle(t *testing.T, x packagestest.Exporter) { e := packagestest.Export(t, x, []packagestest.Module{ { Name: "initial", - Files: map[string]interface{}{ + Files: map[string]any{ "a.go": load("testdata/src/initial/a.go"), "b.go": load("testdata/src/initial/b.go"), "c.go": load("testdata/src/initial/c.go"), @@ -35,7 +35,7 @@ func testBundle(t *testing.T, x packagestest.Exporter) { }, { Name: "domain.name/importdecl", - Files: map[string]interface{}{ + Files: map[string]any{ "p.go": load("testdata/src/domain.name/importdecl/p.go"), }, }, diff --git a/cmd/file2fuzz/main.go b/cmd/file2fuzz/main.go index c2b7ee52089..2a86c2ece88 100644 --- a/cmd/file2fuzz/main.go +++ b/cmd/file2fuzz/main.go @@ -13,7 +13,7 @@ // output to stdout. If any position arguments are provided stdin is ignored // and the arguments are assumed to be input files to convert. // -// The -o flag provides an path to write output files to. If only one positional +// The -o flag provides a path to write output files to. If only one positional // argument is specified it may be a file path or an existing directory, if there are // multiple inputs specified it must be a directory. If a directory is provided // the name of the file will be the SHA-256 hash of its contents. diff --git a/cmd/godex/godex.go b/cmd/godex/godex.go index e91dbfcea5f..619976d4a37 100644 --- a/cmd/godex/godex.go +++ b/cmd/godex/godex.go @@ -84,7 +84,7 @@ func main() { } } -func logf(format string, args ...interface{}) { +func logf(format string, args ...any) { if *verbose { fmt.Fprintf(os.Stderr, format, args...) 
} diff --git a/cmd/godex/print.go b/cmd/godex/print.go index 57383e0e7ec..120c2e04d6b 100644 --- a/cmd/godex/print.go +++ b/cmd/godex/print.go @@ -48,7 +48,7 @@ func (p *printer) print(s string) { } } -func (p *printer) printf(format string, args ...interface{}) { +func (p *printer) printf(format string, args ...any) { p.print(fmt.Sprintf(format, args...)) } diff --git a/cmd/godex/writetype.go b/cmd/godex/writetype.go index bfe36977892..866f718f05f 100644 --- a/cmd/godex/writetype.go +++ b/cmd/godex/writetype.go @@ -111,7 +111,7 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited // n := t.NumMethods() if n == 0 { - p.print("interface{}") + p.print("any") return } diff --git a/cmd/godoc/godoc_test.go b/cmd/godoc/godoc_test.go index 94159445a54..66b93f10630 100644 --- a/cmd/godoc/godoc_test.go +++ b/cmd/godoc/godoc_test.go @@ -71,14 +71,14 @@ func serverAddress(t *testing.T) string { return ln.Addr().String() } -func waitForServerReady(t *testing.T, ctx context.Context, cmd *exec.Cmd, addr string) { +func waitForServerReady(t *testing.T, ctx context.Context, addr string) { waitForServer(t, ctx, fmt.Sprintf("http://%v/", addr), "Go Documentation Server", false) } -func waitForSearchReady(t *testing.T, ctx context.Context, cmd *exec.Cmd, addr string) { +func waitForSearchReady(t *testing.T, ctx context.Context, _ *exec.Cmd, addr string) { waitForServer(t, ctx, fmt.Sprintf("http://%v/search?q=FALLTHROUGH", addr), "The list of tokens.", @@ -208,7 +208,7 @@ func testWeb(t *testing.T, x packagestest.Exporter, bin string, withIndex bool) e := packagestest.Export(t, x, []packagestest.Module{ { Name: "godoc.test/repo1", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `// Package a is a package in godoc.test/repo1. package a; import _ "godoc.test/repo2/a"; const Name = "repo1a"`, "b/b.go": `package b; const Name = "repo1b"`, @@ -216,7 +216,7 @@ package a; import _ "godoc.test/repo2/a"; const Name = "repo1a"`, }, { Name: "godoc.test/repo2", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; const Name = "repo2a"`, "b/b.go": `package b; const Name = "repo2b"`, }, @@ -261,7 +261,7 @@ package a; import _ "godoc.test/repo2/a"; const Name = "repo1a"`, if withIndex { waitForSearchReady(t, ctx, cmd, addr) } else { - waitForServerReady(t, ctx, cmd, addr) + waitForServerReady(t, ctx, addr) waitUntilScanComplete(t, ctx, addr) } diff --git a/cmd/gotype/gotype.go b/cmd/gotype/gotype.go index 4a731f26233..591f163f561 100644 --- a/cmd/gotype/gotype.go +++ b/cmd/gotype/gotype.go @@ -185,7 +185,7 @@ func report(err error) { } // parse may be called concurrently -func parse(filename string, src interface{}) (*ast.File, error) { +func parse(filename string, src any) (*ast.File, error) { if *verbose { fmt.Println(filename) } diff --git a/cmd/goyacc/yacc.go b/cmd/goyacc/yacc.go index bc6395480e8..965a76f14dc 100644 --- a/cmd/goyacc/yacc.go +++ b/cmd/goyacc/yacc.go @@ -52,6 +52,7 @@ import ( "go/format" "math" "os" + "slices" "strconv" "strings" "unicode" @@ -2323,7 +2324,7 @@ func wrstate(i int) { var pp, qq int if len(errors) > 0 { - actions := append([]int(nil), temp1...) + actions := slices.Clone(temp1) defaultAction := ERRCODE if lastred != 0 { defaultAction = -lastred @@ -3176,7 +3177,7 @@ func create(s string) *bufio.Writer { } // write out error comment -func lerrorf(lineno int, s string, v ...interface{}) { +func lerrorf(lineno int, s string, v ...any) { nerrors++ fmt.Fprintf(stderr, s, v...) 
fmt.Fprintf(stderr, ": %v:%v\n", infile, lineno) @@ -3186,7 +3187,7 @@ func lerrorf(lineno int, s string, v ...interface{}) { } } -func errorf(s string, v ...interface{}) { +func errorf(s string, v ...any) { lerrorf(lineno, s, v...) } diff --git a/cmd/signature-fuzzer/fuzz-driver/driver.go b/cmd/signature-fuzzer/fuzz-driver/driver.go index f61ca4b4b52..bd5e5550d42 100644 --- a/cmd/signature-fuzzer/fuzz-driver/driver.go +++ b/cmd/signature-fuzzer/fuzz-driver/driver.go @@ -59,7 +59,7 @@ var selbadfcnflag = flag.Int("badfcnidx", 0, "[Testing only] select index of bad var goimpflag = flag.Bool("goimports", false, "Run 'goimports' on generated code.") var randctlflag = flag.Int("randctl", generator.RandCtlChecks|generator.RandCtlPanic, "Wraprand control flag") -func verb(vlevel int, s string, a ...interface{}) { +func verb(vlevel int, s string, a ...any) { if *verbflag >= vlevel { fmt.Printf(s, a...) fmt.Printf("\n") diff --git a/cmd/signature-fuzzer/fuzz-runner/rnr_test.go b/cmd/signature-fuzzer/fuzz-runner/rnr_test.go index 2bab5b41add..77891c13946 100644 --- a/cmd/signature-fuzzer/fuzz-runner/rnr_test.go +++ b/cmd/signature-fuzzer/fuzz-runner/rnr_test.go @@ -16,7 +16,7 @@ import ( "golang.org/x/tools/internal/testenv" ) -func canRace(t *testing.T) bool { +func canRace() bool { _, err := exec.Command("go", "run", "-race", "./testdata/himom.go").CombinedOutput() return err == nil } @@ -70,7 +70,7 @@ func testRace(t *testing.T, binaryPath string) { // For this test to work, the current test platform has to support the // race detector. Check to see if that is the case by running a very // simple Go program through it. - if !canRace(t) { + if !canRace() { t.Skip("current platform does not appear to support the race detector") } diff --git a/cmd/signature-fuzzer/fuzz-runner/runner.go b/cmd/signature-fuzzer/fuzz-runner/runner.go index 27ab975f0c8..a1c4a11e90a 100644 --- a/cmd/signature-fuzzer/fuzz-runner/runner.go +++ b/cmd/signature-fuzzer/fuzz-runner/runner.go @@ -43,19 +43,19 @@ var forcetmpcleanflag = flag.Bool("forcetmpclean", false, "[Testing only] force var cleancacheflag = flag.Bool("cleancache", true, "[Testing only] don't clean the go cache") var raceflag = flag.Bool("race", false, "[Testing only] build generated code with -race") -func verb(vlevel int, s string, a ...interface{}) { +func verb(vlevel int, s string, a ...any) { if *verbflag >= vlevel { fmt.Printf(s, a...) fmt.Printf("\n") } } -func warn(s string, a ...interface{}) { +func warn(s string, a ...any) { fmt.Fprintf(os.Stderr, s, a...) fmt.Fprintf(os.Stderr, "\n") } -func fatal(s string, a ...interface{}) { +func fatal(s string, a ...any) { fmt.Fprintf(os.Stderr, s, a...) fmt.Fprintf(os.Stderr, "\n") os.Exit(1) diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go index ba5f0552516..6c8002f9f0c 100644 --- a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go +++ b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go @@ -445,7 +445,7 @@ func writeCom(b *bytes.Buffer, i int) { var Verbctl int = 0 -func verb(vlevel int, s string, a ...interface{}) { +func verb(vlevel int, s string, a ...any) { if Verbctl >= vlevel { fmt.Printf(s, a...) 
fmt.Printf("\n") @@ -856,10 +856,7 @@ func (s *genstate) GenFunc(fidx int, pidx int) *funcdef { f.returns = append(f.returns, r) } spw := uint(s.wr.Intn(11)) - rstack := 1 << spw - if rstack < 4 { - rstack = 4 - } + rstack := max(1< ns { - en = ns - } + en := min(st+nel, ns) return "\"" + string(letters[st:en]) + "\"", value + 1 } diff --git a/cmd/splitdwarf/internal/macho/file.go b/cmd/splitdwarf/internal/macho/file.go index ceaaa028e16..dbfa2c0ac4a 100644 --- a/cmd/splitdwarf/internal/macho/file.go +++ b/cmd/splitdwarf/internal/macho/file.go @@ -15,6 +15,7 @@ import ( "fmt" "io" "os" + "slices" "strings" "unsafe" ) @@ -314,7 +315,7 @@ type FormatError struct { msg string } -func formatError(off int64, format string, data ...interface{}) *FormatError { +func formatError(off int64, format string, data ...any) *FormatError { return &FormatError{off, fmt.Sprintf(format, data...)} } @@ -518,7 +519,7 @@ func (b LoadBytes) String() string { } func (b LoadBytes) Raw() []byte { return b } -func (b LoadBytes) Copy() LoadBytes { return LoadBytes(append([]byte{}, b...)) } +func (b LoadBytes) Copy() LoadBytes { return LoadBytes(slices.Clone(b)) } func (b LoadBytes) LoadSize(t *FileTOC) uint32 { return uint32(len(b)) } func (lc LoadCmd) Put(b []byte, o binary.ByteOrder) int { @@ -648,7 +649,7 @@ func (s *Symtab) Put(b []byte, o binary.ByteOrder) int { func (s *Symtab) String() string { return fmt.Sprintf("Symtab %#v", s.SymtabCmd) } func (s *Symtab) Copy() *Symtab { - return &Symtab{SymtabCmd: s.SymtabCmd, Syms: append([]Symbol{}, s.Syms...)} + return &Symtab{SymtabCmd: s.SymtabCmd, Syms: slices.Clone(s.Syms)} } func (s *Symtab) LoadSize(t *FileTOC) uint32 { return uint32(unsafe.Sizeof(SymtabCmd{})) @@ -719,7 +720,7 @@ type Dysymtab struct { func (s *Dysymtab) String() string { return fmt.Sprintf("Dysymtab %#v", s.DysymtabCmd) } func (s *Dysymtab) Copy() *Dysymtab { - return &Dysymtab{DysymtabCmd: s.DysymtabCmd, IndirectSyms: append([]uint32{}, s.IndirectSyms...)} + return &Dysymtab{DysymtabCmd: s.DysymtabCmd, IndirectSyms: slices.Clone(s.IndirectSyms)} } func (s *Dysymtab) LoadSize(t *FileTOC) uint32 { return uint32(unsafe.Sizeof(DysymtabCmd{})) @@ -898,7 +899,7 @@ func NewFile(r io.ReaderAt) (*File, error) { if _, err := r.ReadAt(symdat, int64(hdr.Symoff)); err != nil { return nil, err } - st, err := f.parseSymtab(symdat, strtab, cmddat, &hdr, offset) + st, err := f.parseSymtab(symdat, strtab, &hdr, offset) st.SymtabCmd = hdr if err != nil { return nil, err @@ -1060,7 +1061,7 @@ func NewFile(r io.ReaderAt) (*File, error) { return f, nil } -func (f *File) parseSymtab(symdat, strtab, cmddat []byte, hdr *SymtabCmd, offset int64) (*Symtab, error) { +func (f *File) parseSymtab(symdat, strtab []byte, hdr *SymtabCmd, offset int64) (*Symtab, error) { bo := f.ByteOrder symtab := make([]Symbol, hdr.Nsyms) b := bytes.NewReader(symdat) diff --git a/cmd/splitdwarf/internal/macho/file_test.go b/cmd/splitdwarf/internal/macho/file_test.go index eacd238a16c..c28f3a294bf 100644 --- a/cmd/splitdwarf/internal/macho/file_test.go +++ b/cmd/splitdwarf/internal/macho/file_test.go @@ -13,7 +13,7 @@ import ( type fileTest struct { file string hdr FileHeader - loads []interface{} + loads []any sections []*SectionHeader relocations map[string][]Reloc } @@ -22,7 +22,7 @@ var fileTests = []fileTest{ { "testdata/gcc-386-darwin-exec", FileHeader{0xfeedface, Cpu386, 0x3, 0x2, 0xc, 0x3c0, 0x85}, - []interface{}{ + []any{ &SegmentHeader{LcSegment, 0x38, "__PAGEZERO", 0x0, 0x1000, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0}, 
&SegmentHeader{LcSegment, 0xc0, "__TEXT", 0x1000, 0x1000, 0x0, 0x1000, 0x7, 0x5, 0x2, 0x0, 0}, &SegmentHeader{LcSegment, 0xc0, "__DATA", 0x2000, 0x1000, 0x1000, 0x1000, 0x7, 0x3, 0x2, 0x0, 2}, @@ -48,7 +48,7 @@ var fileTests = []fileTest{ { "testdata/gcc-amd64-darwin-exec", FileHeader{0xfeedfacf, CpuAmd64, 0x80000003, 0x2, 0xb, 0x568, 0x85}, - []interface{}{ + []any{ &SegmentHeader{LcSegment64, 0x48, "__PAGEZERO", 0x0, 0x100000000, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0}, &SegmentHeader{LcSegment64, 0x1d8, "__TEXT", 0x100000000, 0x1000, 0x0, 0x1000, 0x7, 0x5, 0x5, 0x0, 0}, &SegmentHeader{LcSegment64, 0x138, "__DATA", 0x100001000, 0x1000, 0x1000, 0x1000, 0x7, 0x3, 0x3, 0x0, 5}, @@ -76,7 +76,7 @@ var fileTests = []fileTest{ { "testdata/gcc-amd64-darwin-exec-debug", FileHeader{0xfeedfacf, CpuAmd64, 0x80000003, 0xa, 0x4, 0x5a0, 0}, - []interface{}{ + []any{ nil, // LC_UUID &SegmentHeader{LcSegment64, 0x1d8, "__TEXT", 0x100000000, 0x1000, 0x0, 0x0, 0x7, 0x5, 0x5, 0x0, 0}, &SegmentHeader{LcSegment64, 0x138, "__DATA", 0x100001000, 0x1000, 0x0, 0x0, 0x7, 0x3, 0x3, 0x0, 5}, @@ -104,7 +104,7 @@ var fileTests = []fileTest{ { "testdata/clang-386-darwin-exec-with-rpath", FileHeader{0xfeedface, Cpu386, 0x3, 0x2, 0x10, 0x42c, 0x1200085}, - []interface{}{ + []any{ nil, // LC_SEGMENT nil, // LC_SEGMENT nil, // LC_SEGMENT @@ -128,7 +128,7 @@ var fileTests = []fileTest{ { "testdata/clang-amd64-darwin-exec-with-rpath", FileHeader{0xfeedfacf, CpuAmd64, 0x80000003, 0x2, 0x10, 0x4c8, 0x200085}, - []interface{}{ + []any{ nil, // LC_SEGMENT nil, // LC_SEGMENT nil, // LC_SEGMENT @@ -155,7 +155,7 @@ var fileTests = []fileTest{ nil, nil, map[string][]Reloc{ - "__text": []Reloc{ + "__text": { { Addr: 0x1d, Type: uint8(GENERIC_RELOC_VANILLA), @@ -190,7 +190,7 @@ var fileTests = []fileTest{ nil, nil, map[string][]Reloc{ - "__text": []Reloc{ + "__text": { { Addr: 0x19, Type: uint8(X86_64_RELOC_BRANCH), @@ -208,7 +208,7 @@ var fileTests = []fileTest{ Value: 2, }, }, - "__compact_unwind": []Reloc{ + "__compact_unwind": { { Addr: 0x0, Type: uint8(X86_64_RELOC_UNSIGNED), diff --git a/cmd/splitdwarf/splitdwarf.go b/cmd/splitdwarf/splitdwarf.go index e2a7790106f..90ff10b6a05 100644 --- a/cmd/splitdwarf/splitdwarf.go +++ b/cmd/splitdwarf/splitdwarf.go @@ -35,11 +35,11 @@ const ( pageAlign = 12 // 4096 = 1 << 12 ) -func note(format string, why ...interface{}) { +func note(format string, why ...any) { fmt.Fprintf(os.Stderr, format+"\n", why...) } -func fail(format string, why ...interface{}) { +func fail(format string, why ...any) { note(format, why...) os.Exit(1) } @@ -191,7 +191,7 @@ for input_exe need to allow writing. exeNeedsUuid := uuid == nil if exeNeedsUuid { - uuid = &macho.Uuid{macho.UuidCmd{LoadCmd: macho.LcUuid}} + uuid = &macho.Uuid{UuidCmd: macho.UuidCmd{LoadCmd: macho.LcUuid}} uuid.Len = uuid.LoadSize(newtoc) copy(uuid.Id[0:], contentuuid(&exeMacho.FileTOC)[0:16]) uuid.Id[6] = uuid.Id[6]&^0xf0 | 0x40 // version 4 (pseudo-random); see section 4.1.3 diff --git a/cmd/ssadump/main.go b/cmd/ssadump/main.go index f04c1c04633..7eda7b5e2ec 100644 --- a/cmd/ssadump/main.go +++ b/cmd/ssadump/main.go @@ -188,7 +188,7 @@ func doMain() error { // e.g. --flag=one --flag=two would produce []string{"one", "two"}. 
type stringListValue []string -func (ss *stringListValue) Get() interface{} { return []string(*ss) } +func (ss *stringListValue) Get() any { return []string(*ss) } func (ss *stringListValue) String() string { return fmt.Sprintf("%q", *ss) } diff --git a/cmd/stringer/multifile_test.go b/cmd/stringer/multifile_test.go index 32914c5e825..152e1cd7cc1 100644 --- a/cmd/stringer/multifile_test.go +++ b/cmd/stringer/multifile_test.go @@ -29,7 +29,7 @@ import ( // Several tests expect the type Foo generated in some package. func expectFooString(pkg string) []byte { - return []byte(fmt.Sprintf(` + return fmt.Appendf(nil, ` // Header comment ignored. package %s @@ -54,7 +54,7 @@ func (i Foo) String() string { return "Foo(" + strconv.FormatInt(int64(i), 10) + ")" } return _Foo_name[_Foo_index[i]:_Foo_index[i+1]] -}`, pkg)) +}`, pkg) } func TestMultifileStringer(t *testing.T) { diff --git a/cmd/stringer/stringer.go b/cmd/stringer/stringer.go index 09be11ca58e..038e8e831b6 100644 --- a/cmd/stringer/stringer.go +++ b/cmd/stringer/stringer.go @@ -244,10 +244,10 @@ type Generator struct { buf bytes.Buffer // Accumulated output. pkg *Package // Package we are scanning. - logf func(format string, args ...interface{}) // test logging hook; nil when not testing + logf func(format string, args ...any) // test logging hook; nil when not testing } -func (g *Generator) Printf(format string, args ...interface{}) { +func (g *Generator) Printf(format string, args ...any) { fmt.Fprintf(&g.buf, format, args...) } @@ -279,7 +279,7 @@ type Package struct { func loadPackages( patterns, tags []string, trimPrefix string, lineComment bool, - logf func(format string, args ...interface{}), + logf func(format string, args ...any), ) []*Package { cfg := &packages.Config{ Mode: packages.NeedName | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedFiles, diff --git a/go.mod b/go.mod index 0f49047782e..8cea866daf8 100644 --- a/go.mod +++ b/go.mod @@ -5,10 +5,10 @@ go 1.22.0 // => default GODEBUG has gotypesalias=0 require ( github.com/google/go-cmp v0.6.0 github.com/yuin/goldmark v1.4.13 - golang.org/x/mod v0.22.0 - golang.org/x/net v0.34.0 - golang.org/x/sync v0.10.0 + golang.org/x/mod v0.23.0 + golang.org/x/net v0.35.0 + golang.org/x/sync v0.11.0 golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 ) -require golang.org/x/sys v0.29.0 // indirect +require golang.org/x/sys v0.30.0 // indirect diff --git a/go.sum b/go.sum index c788c5fbdc3..2d11b060c08 100644 --- a/go.sum +++ b/go.sum @@ -2,13 +2,13 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= -golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= -golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= -golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/mod v0.23.0 
h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= +golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= +golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= +golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= +golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 3cc2beca737..775fd20094d 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -16,6 +16,7 @@ import ( "path/filepath" "regexp" "runtime" + "slices" "sort" "strconv" "strings" @@ -35,6 +36,12 @@ import ( // and populates it with a GOPATH-style project using filemap (which // maps file names to contents). On success it returns the name of the // directory and a cleanup function to delete it. +// +// TODO(adonovan): provide a newer version that accepts a testing.T, +// calls T.TempDir, and calls T.Fatal on any error, avoiding the need +// to return cleanup or err: +// +// func WriteFilesToTmp(t *testing.T filemap map[string]string) string func WriteFiles(filemap map[string]string) (dir string, cleanup func(), err error) { gopath, err := os.MkdirTemp("", "analysistest") if err != nil { @@ -167,50 +174,27 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns act := result.Action // file -> message -> edits + // TODO(adonovan): this mapping assumes fix.Messages are unique across analyzers, + // whereas they are only unique within a given Diagnostic. fileEdits := make(map[*token.File]map[string][]diff.Edit) - fileContents := make(map[*token.File][]byte) - // Validate edits, prepare the fileEdits map and read the file contents. + // We may assume that fixes are validated upon creation in Pass.Report. + // Group fixes by file and message. for _, diag := range act.Diagnostics { for _, fix := range diag.SuggestedFixes { - // Assert that lazy fixes have a Category (#65578, #65087). if inTools && len(fix.TextEdits) == 0 && diag.Category == "" { t.Errorf("missing Diagnostic.Category for SuggestedFix without TextEdits (gopls requires the category for the name of the fix command") } for _, edit := range fix.TextEdits { - start, end := edit.Pos, edit.End - if !end.IsValid() { - end = start - } - // Validate the edit. 
- if start > end { - t.Errorf( - "diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)", - act.Analyzer.Name, start, end) - continue - } - file, endfile := act.Package.Fset.File(start), act.Package.Fset.File(end) - if file == nil || endfile == nil || file != endfile { - t.Errorf( - "diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v", - act.Analyzer.Name, file.Name(), endfile.Name()) - continue - } - if _, ok := fileContents[file]; !ok { - contents, err := os.ReadFile(file.Name()) - if err != nil { - t.Errorf("error reading %s: %v", file.Name(), err) - } - fileContents[file] = contents - } + file := act.Package.Fset.File(edit.Pos) if _, ok := fileEdits[file]; !ok { fileEdits[file] = make(map[string][]diff.Edit) } fileEdits[file][fix.Message] = append(fileEdits[file][fix.Message], diff.Edit{ - Start: file.Offset(start), - End: file.Offset(end), + Start: file.Offset(edit.Pos), + End: file.Offset(edit.End), New: string(edit.NewText), }) } @@ -219,9 +203,10 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns for file, fixes := range fileEdits { // Get the original file contents. - orig, ok := fileContents[file] - if !ok { - t.Errorf("could not find file contents for %s", file.Name()) + // TODO(adonovan): plumb pass.ReadFile. + orig, err := os.ReadFile(file.Name()) + if err != nil { + t.Errorf("error reading %s: %v", file.Name(), err) continue } @@ -242,8 +227,15 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns t.Errorf("%s.golden has leading comment; we don't know what to do with it", file.Name()) continue } - - for sf, edits := range fixes { + // Sort map keys for determinism in tests. + // TODO(jba): replace with slices.Sorted(maps.Keys(fixes)) when go.mod >= 1.23. + var keys []string + for k := range fixes { + keys = append(keys, k) + } + slices.Sort(keys) + for _, sf := range keys { + edits := fixes[sf] found := false for _, vf := range ar.Files { if vf.Name == sf { @@ -266,10 +258,17 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns } } else { // all suggested fixes are represented by a single file - + // TODO(adonovan): fix: this makes no sense if len(fixes) > 1. + // Sort map keys for determinism in tests. + // TODO(jba): replace with slices.Sorted(maps.Keys(fixes)) when go.mod >= 1.23. + var keys []string + for k := range fixes { + keys = append(keys, k) + } + slices.Sort(keys) var catchallEdits []diff.Edit - for _, edits := range fixes { - catchallEdits = append(catchallEdits, edits...) + for _, k := range keys { + catchallEdits = append(catchallEdits, fixes[k]...) } if err := applyDiffsAndCompare(orig, ar.Comment, catchallEdits, file.Name()); err != nil { diff --git a/go/analysis/checker/checker.go b/go/analysis/checker/checker.go index 5935a62abaf..502ec922179 100644 --- a/go/analysis/checker/checker.go +++ b/go/analysis/checker/checker.go @@ -35,6 +35,7 @@ import ( "go/types" "io" "log" + "os" "reflect" "sort" "strings" @@ -55,9 +56,10 @@ type Options struct { SanityCheck bool // check fact encoding is ok and deterministic FactLog io.Writer // if non-nil, log each exported fact to it - // TODO(adonovan): add ReadFile so that an Overlay specified + // TODO(adonovan): expose ReadFile so that an Overlay specified // in the [packages.Config] can be communicated via // Pass.ReadFile to each Analyzer. 
+ readFile analysisinternal.ReadFileFunc } // Graph holds the results of a round of analysis, including the graph @@ -335,8 +337,14 @@ func (act *Action) execOnce() { TypeErrors: act.Package.TypeErrors, Module: module, - ResultOf: inputs, - Report: func(d analysis.Diagnostic) { act.Diagnostics = append(act.Diagnostics, d) }, + ResultOf: inputs, + Report: func(d analysis.Diagnostic) { + // Assert that SuggestedFixes are well formed. + if err := analysisinternal.ValidateFixes(act.Package.Fset, act.Analyzer, d.SuggestedFixes); err != nil { + panic(err) + } + act.Diagnostics = append(act.Diagnostics, d) + }, ImportObjectFact: act.ObjectFact, ExportObjectFact: act.exportObjectFact, ImportPackageFact: act.PackageFact, @@ -344,7 +352,11 @@ func (act *Action) execOnce() { AllObjectFacts: act.AllObjectFacts, AllPackageFacts: act.AllPackageFacts, } - pass.ReadFile = analysisinternal.MakeReadFile(pass) + readFile := os.ReadFile + if act.opts.readFile != nil { + readFile = act.opts.readFile + } + pass.ReadFile = analysisinternal.CheckedReadFile(pass, readFile) act.pass = pass act.Result, act.Err = func() (any, error) { diff --git a/go/analysis/diagnostic.go b/go/analysis/diagnostic.go index ee083a2d686..f6118bec647 100644 --- a/go/analysis/diagnostic.go +++ b/go/analysis/diagnostic.go @@ -65,7 +65,9 @@ type RelatedInformation struct { // user can choose to apply to their code. Usually the SuggestedFix is // meant to fix the issue flagged by the diagnostic. // -// The TextEdits must not overlap, nor contain edits for other packages. +// The TextEdits must not overlap, nor contain edits for other +// packages. Edits need not be totally ordered, but the order +// determines how insertions at the same point will be applied. type SuggestedFix struct { // A verb phrase describing the fix, to be shown to // a user trying to decide whether to accept it. diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go index 0c2fc5e59db..fb3c47b1625 100644 --- a/go/analysis/internal/checker/checker.go +++ b/go/analysis/internal/checker/checker.go @@ -17,9 +17,9 @@ import ( "flag" "fmt" "go/format" - "go/token" "io" - "io/ioutil" + "maps" + "log" "os" "runtime" @@ -31,10 +31,11 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/checker" + "golang.org/x/tools/go/analysis/internal" "golang.org/x/tools/go/analysis/internal/analysisflags" "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/robustio" ) var ( @@ -54,8 +55,12 @@ var ( // IncludeTests indicates whether test files should be analyzed too. IncludeTests = true - // Fix determines whether to apply all suggested fixes. + // Fix determines whether to apply (!Diff) or display (Diff) all suggested fixes. Fix bool + + // Diff causes the file updates to be displayed, but not applied. + // This flag has no effect unless Fix is true. + Diff bool ) // RegisterFlags registers command-line flags used by the analysis driver. 
@@ -71,6 +76,7 @@ func RegisterFlags() { flag.BoolVar(&IncludeTests, "test", IncludeTests, "indicates whether test files should be analyzed, too") flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes") + flag.BoolVar(&Diff, "diff", false, "with -fix, don't update the files, but print a unified diff") } // Run loads the packages specified by args using go/packages, @@ -139,6 +145,9 @@ func Run(args []string, analyzers []*analysis.Analyzer) int { return 1 } + // TODO(adonovan): simplify exit code logic by using a single + // exit code variable and applying "code = max(code, X)" each + // time an error of code X occurs. pkgsExitCode := 0 // Print package and module errors regardless of RunDespiteErrors. // Do not exit if there are errors, yet. @@ -166,13 +175,18 @@ func Run(args []string, analyzers []*analysis.Analyzer) int { return 1 } - // Apply all fixes from the root actions. + // Don't print the diagnostics, + // but apply all fixes from the root actions. if Fix { - if err := applyFixes(graph.Roots); err != nil { + if err := applyFixes(graph.Roots, Diff); err != nil { // Fail when applying fixes failed. log.Print(err) return 1 } + // TODO(adonovan): don't proceed to print the text or JSON output + // if we applied fixes; stop here. + // + // return pkgsExitCode } // Print the results. If !RunDespiteErrors and there @@ -261,7 +275,13 @@ func load(patterns []string, allSyntax bool) ([]*packages.Package, error) { } mode |= packages.NeedModule conf := packages.Config{ - Mode: mode, + Mode: mode, + // Ensure that child process inherits correct alias of PWD. + // (See discussion at Dir field of [exec.Command].) + // However, this currently breaks some tests. + // TODO(adonovan): Investigate. + // + // Dir: os.Getenv("PWD"), Tests: IncludeTests, } initial, err := packages.Load(&conf, patterns...) @@ -271,181 +291,257 @@ func load(patterns []string, allSyntax bool) ([]*packages.Package, error) { return initial, err } -// applyFixes applies suggested fixes associated with diagnostics -// reported by the specified actions. It verifies that edits do not -// conflict, even through file-system level aliases such as symbolic -// links, and then edits the files. -func applyFixes(actions []*checker.Action) error { - // Visit all of the actions and accumulate the suggested edits. - paths := make(map[robustio.FileID]string) - editsByAction := make(map[robustio.FileID]map[*checker.Action][]diff.Edit) +// applyFixes attempts to apply the first suggested fix associated +// with each diagnostic reported by the specified actions. +// All fixes must have been validated by [analysisinternal.ValidateFixes]. +// +// Each fix is treated as an independent change; fixes are merged in +// an arbitrary deterministic order as if by a three-way diff tool +// such as the UNIX diff3 command or 'git merge'. Any fix that cannot be +// cleanly merged is discarded, in which case the final summary tells +// the user to re-run the tool. +// TODO(adonovan): make the checker tool re-run the analysis itself. +// +// When the same file is analyzed as a member of both a primary +// package "p" and a test-augmented package "p [p.test]", there may be +// duplicate diagnostics and fixes. One set of fixes will be applied +// and the other will be discarded; but re-running the tool may then +// show zero fixes, which may cause the confused user to wonder what +// happened to the other ones. +// TODO(adonovan): consider pre-filtering completely identical fixes. 
+// +// A common reason for overlapping fixes is duplicate additions of the +// same import. The merge algorithm may often cleanly resolve such +// fixes, coalescing identical edits, but the merge may sometimes be +// confused by nearby changes. +// +// Even when merging succeeds, there is no guarantee that the +// composition of the two fixes is semantically correct. Coalescing +// identical edits is appropriate for imports, but not for, say, +// increments to a counter variable; the correct resolution in that +// case might be to increment it twice. Or consider two fixes that +// each delete the penultimate reference to an import or local +// variable: each fix is sound individually, and they may be textually +// distant from each other, but when both are applied, the program is +// no longer valid because it has an unreferenced import or local +// variable. +// TODO(adonovan): investigate replacing the final "gofmt" step with a +// formatter that applies the unused-import deletion logic of +// "goimports". +// +// Merging depends on both the order of fixes and the order of edits +// within them. For example, if three fixes add import "a" twice and +// import "b" once, the two imports of "a" may be combined if they +// appear in order [a, a, b], or not if they appear as [a, b, a]. +// TODO(adonovan): investigate an algebraic approach to imports; +// that is, for fixes to Go source files, convert changes within the +// import(...) portion of the file into semantic edits, compose those +// edits algebraically, then convert the result back to edits. +// +// applyFixes returns success if all fixes are valid, could be cleanly +// merged, and the corresponding files were successfully updated. +// +// If showDiff, instead of updating the files it displays the final +// patch composed of all the cleanly merged fixes. +// +// TODO(adonovan): handle file-system level aliases such as symbolic +// links using robustio.FileID. +func applyFixes(actions []*checker.Action, showDiff bool) error { + + // Select fixes to apply. + // + // If there are several for a given Diagnostic, choose the first. + // Preserve the order of iteration, for determinism. + type fixact struct { + fix *analysis.SuggestedFix + act *checker.Action + } + var fixes []*fixact for _, act := range actions { - editsForTokenFile := make(map[*token.File][]diff.Edit) for _, diag := range act.Diagnostics { - for _, sf := range diag.SuggestedFixes { - for _, edit := range sf.TextEdits { - // Validate the edit. - // Any error here indicates a bug in the analyzer. - start, end := edit.Pos, edit.End - file := act.Package.Fset.File(start) - if file == nil { - return fmt.Errorf("analysis %q suggests invalid fix: missing file info for pos (%v)", - act.Analyzer.Name, edit.Pos) - } - if !end.IsValid() { - end = start - } - if start > end { - return fmt.Errorf("analysis %q suggests invalid fix: pos (%v) > end (%v)", - act.Analyzer.Name, edit.Pos, edit.End) - } - if eof := token.Pos(file.Base() + file.Size()); end > eof { - return fmt.Errorf("analysis %q suggests invalid fix: end (%v) past end of file (%v)", - act.Analyzer.Name, edit.End, eof) - } - edit := diff.Edit{ - Start: file.Offset(start), - End: file.Offset(end), - New: string(edit.NewText), - } - editsForTokenFile[file] = append(editsForTokenFile[file], edit) + for i := range diag.SuggestedFixes { + fix := &diag.SuggestedFixes[i] + if i == 0 { + fixes = append(fixes, &fixact{fix, act}) + } else { + // TODO(adonovan): abstract the logger.
+ log.Printf("%s: ignoring alternative fix %q", act, fix.Message) } } } + } - for f, edits := range editsForTokenFile { - id, _, err := robustio.GetFileID(f.Name()) + // Read file content on demand, from the virtual + // file system that fed the analyzer (see #62292). + // + // This cache assumes that all successful reads for the same + // file name return the same content. + // (It is tempting to group fixes by package and do the + // merge/apply/format steps one package at a time, but + // packages are not disjoint, due to test variants, so this + // would not really address the issue.) + baselineContent := make(map[string][]byte) + getBaseline := func(readFile analysisinternal.ReadFileFunc, filename string) ([]byte, error) { + content, ok := baselineContent[filename] + if !ok { + var err error + content, err = readFile(filename) if err != nil { - return err - } - if _, hasId := paths[id]; !hasId { - paths[id] = f.Name() - editsByAction[id] = make(map[*checker.Action][]diff.Edit) + return nil, err } - editsByAction[id][act] = edits + baselineContent[filename] = content } + return content, nil } - // Validate and group the edits to each actual file. - editsByPath := make(map[string][]diff.Edit) - for id, actToEdits := range editsByAction { - path := paths[id] - actions := make([]*checker.Action, 0, len(actToEdits)) - for act := range actToEdits { - actions = append(actions, act) - } + // Apply each fix, updating the current state + // only if the entire fix can be cleanly merged. + accumulatedEdits := make(map[string][]diff.Edit) + goodFixes := 0 +fixloop: + for _, fixact := range fixes { + readFile := internal.Pass(fixact.act).ReadFile + + // Convert analysis.TextEdits to diff.Edits, grouped by file. + // Precondition: a prior call to validateFix succeeded. + fileEdits := make(map[string][]diff.Edit) + fset := fixact.act.Package.Fset + for _, edit := range fixact.fix.TextEdits { + file := fset.File(edit.Pos) + + baseline, err := getBaseline(readFile, file.Name()) + if err != nil { + log.Printf("skipping fix to file %s: %v", file.Name(), err) + continue fixloop + } - // Does any action create conflicting edits? - for _, act := range actions { - edits := actToEdits[act] - if _, invalid := validateEdits(edits); invalid > 0 { - name, x, y := act.Analyzer.Name, edits[invalid-1], edits[invalid] - return diff3Conflict(path, name, name, []diff.Edit{x}, []diff.Edit{y}) + // We choose to treat size mismatch as a serious error, + // as it indicates a concurrent write to at least one file, + // and possibly others (consider a git checkout, for example). + if file.Size() != len(baseline) { + return fmt.Errorf("concurrent file modification detected in file %s (size changed from %d -> %d bytes); aborting fix", + file.Name(), file.Size(), len(baseline)) } + + fileEdits[file.Name()] = append(fileEdits[file.Name()], diff.Edit{ + Start: file.Offset(edit.Pos), + End: file.Offset(edit.End), + New: string(edit.NewText), + }) } - // Does any pair of different actions create edits that conflict? - for j := range actions { - for k := range actions[:j] { - x, y := actions[j], actions[k] - if x.Analyzer.Name > y.Analyzer.Name { - x, y = y, x - } - xedits, yedits := actToEdits[x], actToEdits[y] - combined := append(xedits, yedits...) - if _, invalid := validateEdits(combined); invalid > 0 { - // TODO: consider applying each action's consistent list of edits entirely, - // and then using a three-way merge (such as GNU diff3) on the resulting - // files to report more precisely the parts that actually conflict. 
- return diff3Conflict(path, x.Analyzer.Name, y.Analyzer.Name, xedits, yedits) + // Apply each set of edits by merging atop + // the previous accumulated state. + after := make(map[string][]diff.Edit) + for file, edits := range fileEdits { + if prev := accumulatedEdits[file]; len(prev) > 0 { + merged, ok := diff.Merge(prev, edits) + if !ok { + // debugging + if false { + log.Printf("%s: fix %s conflicts", fixact.act, fixact.fix.Message) + } + continue fixloop // conflict } + edits = merged } + after[file] = edits } - var edits []diff.Edit - for act := range actToEdits { - edits = append(edits, actToEdits[act]...) + // The entire fix applied cleanly; commit it. + goodFixes++ + maps.Copy(accumulatedEdits, after) + // debugging + if false { + log.Printf("%s: fix %s applied", fixact.act, fixact.fix.Message) } - editsByPath[path], _ = validateEdits(edits) // remove duplicates. already validated. } + badFixes := len(fixes) - goodFixes - // Now we've got a set of valid edits for each file. Apply them. - // TODO(adonovan): don't abort the operation partway just because one file fails. - for path, edits := range editsByPath { - // TODO(adonovan): this should really work on the same - // gulp from the file system that fed the analyzer (see #62292). - contents, err := os.ReadFile(path) - if err != nil { - return err + // Show diff or update files to final state. + var files []string + for file := range accumulatedEdits { + files = append(files, file) + } + sort.Strings(files) // for deterministic -diff + var filesUpdated, totalFiles int + for _, file := range files { + edits := accumulatedEdits[file] + if len(edits) == 0 { + continue // the diffs annihilated (a miracle?) } - out, err := diff.ApplyBytes(contents, edits) + // Apply accumulated fixes. + baseline := baselineContent[file] // (cache hit) + final, err := diff.ApplyBytes(baseline, edits) if err != nil { - return err - } - - // Try to format the file. - if formatted, err := format.Source(out); err == nil { - out = formatted + log.Fatalf("internal error in diff.ApplyBytes: %v", err) } - if err := os.WriteFile(path, out, 0644); err != nil { - return err + // Attempt to format each file. + if formatted, err := format.Source(final); err == nil { + final = formatted } - } - return nil -} -// validateEdits returns a list of edits that is sorted and -// contains no duplicate edits. Returns the index of some -// overlapping adjacent edits if there is one and <0 if the -// edits are valid. -func validateEdits(edits []diff.Edit) ([]diff.Edit, int) { - if len(edits) == 0 { - return nil, -1 - } - equivalent := func(x, y diff.Edit) bool { - return x.Start == y.Start && x.End == y.End && x.New == y.New - } - diff.SortEdits(edits) - unique := []diff.Edit{edits[0]} - invalid := -1 - for i := 1; i < len(edits); i++ { - prev, cur := edits[i-1], edits[i] - // We skip over equivalent edits without considering them - // an error. This handles identical edits coming from the - // multiple ways of loading a package into a - // *go/packages.Packages for testing, e.g. packages "p" and "p [p.test]". - if !equivalent(prev, cur) { - unique = append(unique, cur) - if prev.End > cur.Start { - invalid = i + if showDiff { + // Since we formatted the file, we need to recompute the diff. + unified := diff.Unified(file+" (old)", file+" (new)", string(baseline), string(final)) + // TODO(adonovan): abstract the I/O. + os.Stdout.WriteString(unified) + + } else { + // write + totalFiles++ + // TODO(adonovan): abstract the I/O. 
+ if err := os.WriteFile(file, final, 0644); err != nil { + log.Println(err) + continue } + filesUpdated++ } } - return unique, invalid -} - -// diff3Conflict returns an error describing two conflicting sets of -// edits on a file at path. -func diff3Conflict(path string, xlabel, ylabel string, xedits, yedits []diff.Edit) error { - contents, err := ioutil.ReadFile(path) - if err != nil { - return err - } - oldlabel, old := "base", string(contents) - xdiff, err := diff.ToUnified(oldlabel, xlabel, old, xedits, diff.DefaultContextLines) - if err != nil { - return err - } - ydiff, err := diff.ToUnified(oldlabel, ylabel, old, yedits, diff.DefaultContextLines) - if err != nil { - return err + // TODO(adonovan): consider returning a structured result that + // maps each SuggestedFix to its status: + // - invalid + // - secondary, not selected + // - applied + // - had conflicts. + // and a mapping from each affected file to: + // - its final/original content pair, and + // - whether formatting was successful. + // Then file writes and the UI can be applied by the caller + // in whatever form they like. + + // If victory was incomplete, report an error that indicates partial progress. + // + // badFixes > 0 indicates that we decided not to attempt some + // fixes due to conflicts or failure to read the source; still + // it's a relatively benign situation since the user can + // re-run the tool, and we may still make progress. + // + // filesUpdated < totalFiles indicates that some file updates + // failed. This should be rare, but is a serious error as it + // may apply half a fix, or leave the files in a bad state. + // + // These numbers are potentially misleading: + // The denominator includes duplicate conflicting fixes due to + // common files in packages "p" and "p [p.test]", which may + // have been fixed and won't appear in the re-run. + // TODO(adonovan): eliminate identical fixes as an initial + // filtering step. + // + // TODO(adonovan): should we log that n files were updated in case of total victory? + if badFixes > 0 || filesUpdated < totalFiles { + if showDiff { + return fmt.Errorf("%d of %d fixes skipped (e.g. due to conflicts)", badFixes, len(fixes)) + } else { + return fmt.Errorf("applied %d of %d fixes; %d files updated.
(Re-run the command to apply more.)", + goodFixes, len(fixes), filesUpdated) + } } - return fmt.Errorf("conflicting edits from %s and %s on %s\nfirst edits:\n%s\nsecond edits:\n%s", - xlabel, ylabel, path, xdiff, ydiff) + return nil } // needFacts reports whether any analysis required by the specified set diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go index 77a57f5119c..fcf5f66e03e 100644 --- a/go/analysis/internal/checker/checker_test.go +++ b/go/analysis/internal/checker/checker_test.go @@ -5,8 +5,6 @@ package checker_test import ( - "fmt" - "go/ast" "os" "path/filepath" "reflect" @@ -17,7 +15,6 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/internal/checker" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/internal/testfiles" "golang.org/x/tools/txtar" @@ -25,6 +22,7 @@ import ( func TestApplyFixes(t *testing.T) { testenv.NeedsGoPackages(t) + testenv.RedirectStderr(t) // associated checker.Run output with this test files := map[string]string{ "rename/test.go": `package rename @@ -67,72 +65,9 @@ func Foo() { defer cleanup() } -var renameAnalyzer = &analysis.Analyzer{ - Name: "rename", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - Doc: "renames symbols named bar to baz", - RunDespiteErrors: true, -} - -var otherAnalyzer = &analysis.Analyzer{ // like analyzer but with a different Name. - Name: "other", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - Doc: "renames symbols named bar to baz only in package 'other'", -} - -func run(pass *analysis.Pass) (interface{}, error) { - const ( - from = "bar" - to = "baz" - conflict = "conflict" // add conflicting edits to package conflict. - duplicate = "duplicate" // add duplicate edits to package conflict. - other = "other" // add conflicting edits to package other from different analyzers. - ) - - if pass.Analyzer.Name == other { - if pass.Pkg.Name() != other { - return nil, nil // only apply Analyzer other to packages named other - } - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{(*ast.Ident)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - ident := n.(*ast.Ident) - if ident.Name == from { - msg := fmt.Sprintf("renaming %q to %q", from, to) - edits := []analysis.TextEdit{ - {Pos: ident.Pos(), End: ident.End(), NewText: []byte(to)}, - } - switch pass.Pkg.Name() { - case conflict: - edits = append(edits, []analysis.TextEdit{ - {Pos: ident.Pos() - 1, End: ident.End(), NewText: []byte(to)}, - {Pos: ident.Pos(), End: ident.End() - 1, NewText: []byte(to)}, - {Pos: ident.Pos(), End: ident.End(), NewText: []byte("lorem ipsum")}, - }...) - case duplicate: - edits = append(edits, edits...) 
- case other: - if pass.Analyzer.Name == other { - edits[0].Pos = edits[0].Pos + 1 // shift by one to mismatch analyzer and other - } - } - pass.Report(analysis.Diagnostic{ - Pos: ident.Pos(), - End: ident.End(), - Message: msg, - SuggestedFixes: []analysis.SuggestedFix{{Message: msg, TextEdits: edits}}}) - } - }) - - return nil, nil -} - func TestRunDespiteErrors(t *testing.T) { testenv.NeedsGoPackages(t) + testenv.RedirectStderr(t) // associate checker.Run output with this test files := map[string]string{ "rderr/test.go": `package rderr @@ -360,4 +295,7 @@ hello from other if !ran { t.Error("analyzer did not run") } + + // TODO(adonovan): test that fixes are applied to the + // pass.ReadFile virtual file tree. } diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go index b169d79a087..8fb7506ac70 100644 --- a/go/analysis/internal/checker/fix_test.go +++ b/go/analysis/internal/checker/fix_test.go @@ -5,45 +5,44 @@ package checker_test import ( + "bytes" "flag" "fmt" + "go/ast" "go/token" "log" "os" "os/exec" - "path" + "path/filepath" "regexp" "runtime" + "slices" "strings" "testing" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/analysis/checker" "golang.org/x/tools/go/analysis/multichecker" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/expect" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/testfiles" + "golang.org/x/tools/txtar" ) -// These are the analyzers available to the multichecker. -// (Tests may add more in init functions as needed.) -var candidates = map[string]*analysis.Analyzer{ - renameAnalyzer.Name: renameAnalyzer, - otherAnalyzer.Name: otherAnalyzer, -} - func TestMain(m *testing.M) { - // If the ANALYZERS=a,..,z environment is set, then this - // process should behave like a multichecker with the - // named analyzers. - if s, ok := os.LookupEnv("ANALYZERS"); ok { - var analyzers []*analysis.Analyzer - for _, name := range strings.Split(s, ",") { - a := candidates[name] - if a == nil { - log.Fatalf("no such analyzer: %q", name) - } - analyzers = append(analyzers, a) - } - multichecker.Main(analyzers...) + // If the CHECKER_TEST_CHILD environment variable is set, + // this process should behave like a multichecker. + // Analyzers are selected by flags. + if _, ok := os.LookupEnv("CHECKER_TEST_CHILD"); ok { + multichecker.Main( + markerAnalyzer, + noendAnalyzer, + renameAnalyzer, + ) panic("unreachable") } @@ -58,270 +57,553 @@ const ( exitCodeDiagnostics = 3 // diagnostics were reported ) -// fix runs a multichecker subprocess with -fix in the specified -// directory, applying the comma-separated list of named analyzers to -// the packages matching the patterns. It returns the CombinedOutput. -func fix(t *testing.T, dir, analyzers string, wantExit int, patterns ...string) string { - testenv.NeedsExec(t) - testenv.NeedsTool(t, "go") - - cmd := exec.Command(os.Args[0], "-fix") - cmd.Args = append(cmd.Args, patterns...) 
- cmd.Env = append(os.Environ(), - "ANALYZERS="+analyzers, - "GOPATH="+dir, - "GO111MODULE=off", - "GOPROXY=off") - - clean := func(s string) string { - return strings.ReplaceAll(s, os.TempDir(), "os.TempDir/") - } - outBytes, err := cmd.CombinedOutput() - out := clean(string(outBytes)) - t.Logf("$ %s\n%s", clean(fmt.Sprint(cmd)), out) - if err, ok := err.(*exec.ExitError); !ok { - t.Fatalf("failed to execute multichecker: %v", err) - } else if err.ExitCode() != wantExit { - // plan9 ExitCode() currently only returns 0 for success or 1 for failure - if !(runtime.GOOS == "plan9" && wantExit != exitCodeSuccess && err.ExitCode() != exitCodeSuccess) { - t.Errorf("exit code was %d, want %d", err.ExitCode(), wantExit) - } - } - return out -} - -// TestFixes ensures that checker.Run applies fixes correctly. -// This test fork/execs the main function above. -func TestFixes(t *testing.T) { - files := map[string]string{ - "rename/foo.go": `package rename - -func Foo() { - bar := 12 - _ = bar -} - -// the end -`, - "rename/intestfile_test.go": `package rename - -func InTestFile() { - bar := 13 - _ = bar -} - -// the end -`, - "rename/foo_test.go": `package rename_test - -func Foo() { - bar := 14 - _ = bar -} - -// the end -`, - "duplicate/dup.go": `package duplicate - -func Foo() { - bar := 14 - _ = bar -} - -// the end -`, - } - fixed := map[string]string{ - "rename/foo.go": `package rename - -func Foo() { - baz := 12 - _ = baz -} - -// the end -`, - "rename/intestfile_test.go": `package rename - -func InTestFile() { - baz := 13 - _ = baz -} +// TestReportInvalidDiagnostic tests that a call to pass.Report with +// certain kind of invalid diagnostic (e.g. conflicting fixes) +// promptly results in a panic. +func TestReportInvalidDiagnostic(t *testing.T) { + testenv.NeedsGoPackages(t) -// the end -`, - "rename/foo_test.go": `package rename_test - -func Foo() { - baz := 14 - _ = baz -} - -// the end -`, - "duplicate/dup.go": `package duplicate - -func Foo() { - baz := 14 - _ = baz -} - -// the end -`, - } - dir, cleanup, err := analysistest.WriteFiles(files) + // Load the errors package. + cfg := &packages.Config{Mode: packages.LoadAllSyntax} + initial, err := packages.Load(cfg, "errors") if err != nil { - t.Fatalf("Creating test files failed with %s", err) + t.Fatal(err) } - defer cleanup() - - fix(t, dir, "rename,other", exitCodeDiagnostics, "rename", "duplicate") - for name, want := range fixed { - path := path.Join(dir, "src", name) - contents, err := os.ReadFile(path) - if err != nil { - t.Errorf("error reading %s: %v", path, err) - } - if got := string(contents); got != want { - t.Errorf("contents of %s file did not match expectations. got=%s, want=%s", path, got, want) - } + for _, test := range []struct { + name string + want string + diag func(pos token.Pos) analysis.Diagnostic + }{ + // Diagnostic has two alternative fixes with the same Message. + { + "duplicate message", + `analyzer "a" suggests two fixes with same Message \(fix\)`, + func(pos token.Pos) analysis.Diagnostic { + return analysis.Diagnostic{ + Pos: pos, + Message: "oops", + SuggestedFixes: []analysis.SuggestedFix{ + {Message: "fix"}, + {Message: "fix"}, + }, + } + }, + }, + // TextEdit has invalid Pos. 
+ { + "bad Pos", + `analyzer "a" suggests invalid fix .*: missing file info for pos`, + func(pos token.Pos) analysis.Diagnostic { + return analysis.Diagnostic{ + Pos: pos, + Message: "oops", + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "fix", + TextEdits: []analysis.TextEdit{{}}, + }, + }, + } + }, + }, + // TextEdit has invalid End. + { + "End < Pos", + `analyzer "a" suggests invalid fix .*: pos .* > end`, + func(pos token.Pos) analysis.Diagnostic { + return analysis.Diagnostic{ + Pos: pos, + Message: "oops", + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "fix", + TextEdits: []analysis.TextEdit{{ + Pos: pos + 2, + End: pos, + }}, + }, + }, + } + }, + }, + // Two TextEdits overlap. + { + "overlapping edits", + `analyzer "a" suggests invalid fix .*: overlapping edits to .*errors.go \(1:1-1:3 and 1:2-1:4\)`, + func(pos token.Pos) analysis.Diagnostic { + return analysis.Diagnostic{ + Pos: pos, + Message: "oops", + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "fix", + TextEdits: []analysis.TextEdit{ + {Pos: pos, End: pos + 2}, + {Pos: pos + 1, End: pos + 3}, + }, + }, + }, + } + }, + }, + } { + t.Run(test.name, func(t *testing.T) { + reached := false + a := &analysis.Analyzer{Name: "a", Doc: "doc", Run: func(pass *analysis.Pass) (any, error) { + reached = true + panics(t, test.want, func() { + pos := pass.Files[0].FileStart + pass.Report(test.diag(pos)) + }) + return nil, nil + }} + if _, err := checker.Analyze([]*analysis.Analyzer{a}, initial, &checker.Options{}); err != nil { + t.Fatalf("Analyze failed: %v", err) + } + if !reached { + t.Error("analyzer was never invoked") + } + }) } } -// TestConflict ensures that checker.Run detects conflicts correctly. -// This test fork/execs the main function above. -func TestConflict(t *testing.T) { - files := map[string]string{ - "conflict/foo.go": `package conflict - -func Foo() { - bar := 12 - _ = bar -} +// TestScript runs script-driven tests in testdata/*.txt. +// Each file is a txtar archive, expanded to a temporary directory. +// +// The comment section of the archive is a script, with the following +// commands: +// +// # comment +// ignored +// blank line +// ignored +// skip k=v... +// Skip the test if any k=v string is a substring of the string +// "GOOS=darwin GOARCH=arm64" appropriate to the current build. +// checker args... +// Run the checker command with the specified space-separated +// arguments; this fork+execs the [TestMain] function above. +// If the archive has a "stdout" section, its contents must +// match the stdout output of the checker command. +// Do NOT use this for testing -diff: tests should not +// rely on the particulars of the diff algorithm. +// exit int +// Assert that previous checker command had this exit code. +// stderr regexp +// Assert that stderr output from previous checker run matches this pattern. +// +// The script must include at least one 'checker' command. 
+func TestScript(t *testing.T) { + testenv.NeedsExec(t) + testenv.NeedsGoPackages(t) -// the end -`, - } - dir, cleanup, err := analysistest.WriteFiles(files) + txtfiles, err := filepath.Glob("testdata/*.txt") if err != nil { - t.Fatalf("Creating test files failed with %s", err) + t.Fatal(err) } - defer cleanup() - - out := fix(t, dir, "rename,other", exitCodeFailed, "conflict") - - pattern := `conflicting edits from rename and rename on .*foo.go` - matched, err := regexp.MatchString(pattern, out) - if err != nil { - t.Errorf("error matching pattern %s: %v", pattern, err) - } else if !matched { - t.Errorf("output did not match pattern: %s", pattern) + for _, txtfile := range txtfiles { + t.Run(txtfile, func(t *testing.T) { + t.Parallel() + + // Expand archive into tmp tree. + ar, err := txtar.ParseFile(txtfile) + if err != nil { + t.Fatal(err) + } + fs, err := txtar.FS(ar) + if err != nil { + t.Fatal(err) + } + dir := testfiles.CopyToTmp(t, fs) + + // Parse txtar comment as a script. + const noExitCode = -999 + var ( + // state variables operated on by script + lastExitCode = noExitCode + lastStderr string + ) + for i, line := range strings.Split(string(ar.Comment), "\n") { + line = strings.TrimSpace(line) + if line == "" || line[0] == '#' { + continue // skip blanks and comments + } + + command, rest, _ := strings.Cut(line, " ") + prefix := fmt.Sprintf("%s:%d: %s", txtfile, i+1, command) // for error messages + switch command { + case "checker": + cmd := exec.Command(os.Args[0], strings.Fields(rest)...) + cmd.Dir = dir + cmd.Stdout = new(strings.Builder) + cmd.Stderr = new(strings.Builder) + cmd.Env = append(os.Environ(), "CHECKER_TEST_CHILD=1", "GOPROXY=off") + if err := cmd.Run(); err != nil { + if err, ok := err.(*exec.ExitError); ok { + lastExitCode = err.ExitCode() + // fall through + } else { + t.Fatalf("%s: failed to execute checker: %v (%s)", prefix, err, cmd) + } + } else { + lastExitCode = 0 // success + } + + // Eliminate nondeterministic strings from the output. + clean := func(x any) string { + s := fmt.Sprint(x) + pwd, _ := os.Getwd() + if realDir, err := filepath.EvalSymlinks(dir); err == nil { + // Work around checker's packages.Load failing to + // set Config.Dir to dir, causing the filenames + // of loaded packages not to be a subdir of dir. + s = strings.ReplaceAll(s, realDir, dir) + } + s = strings.ReplaceAll(s, dir, string(os.PathSeparator)+"TMP") + s = strings.ReplaceAll(s, pwd, string(os.PathSeparator)+"PWD") + s = strings.ReplaceAll(s, cmd.Path, filepath.Base(cmd.Path)) + return s + } + + lastStderr = clean(cmd.Stderr) + stdout := clean(cmd.Stdout) + + // Detect bad markers out of band: + // though they cause a non-zero exit, + // that may be expected. + if strings.Contains(lastStderr, badMarker) { + t.Errorf("marker analyzer encountered errors; stderr=%s", lastStderr) + } + + // debugging + if false { + t.Logf("%s: $ %s\nstdout:\n%s\nstderr:\n%s", prefix, clean(cmd), stdout, lastStderr) + } + + unified := func(xlabel, ylabel string, x, y []byte) string { + x = append(slices.Clip(bytes.TrimSpace(x)), '\n') + y = append(slices.Clip(bytes.TrimSpace(y)), '\n') + return diff.Unified(xlabel, ylabel, string(x), string(y)) + } + + // Check stdout, if there's a section of that name. + // + // Do not use this for testing -diff! It exposes tests to the + // internals of our (often suboptimal) diff algorithm. + // Instead, use the want/ mechanism. 
+ if f := section(ar, "stdout"); f != nil { + got, want := []byte(stdout), f.Data + if diff := unified("got", "want", got, want); diff != "" { + t.Errorf("%s: unexpected stdout: -- got --\n%s-- want --\n%s-- diff --\n%s", + prefix, + got, want, diff) + } + } + + for _, f := range ar.Files { + // For each file named want/X, assert that the + // current content of X now equals want/X. + if filename, ok := strings.CutPrefix(f.Name, "want/"); ok { + fixed, err := os.ReadFile(filepath.Join(dir, filename)) + if err != nil { + t.Errorf("reading %s: %v", filename, err) + continue + } + var original []byte + if f := section(ar, filename); f != nil { + original = f.Data + } + want := f.Data + if diff := unified(filename+" (fixed)", filename+" (want)", fixed, want); diff != "" { + t.Errorf("%s: unexpected %s content:\n"+ + "-- original --\n%s\n"+ + "-- fixed --\n%s\n"+ + "-- want --\n%s\n"+ + "-- diff original fixed --\n%s\n"+ + "-- diff fixed want --\n%s", + prefix, filename, + original, + fixed, + want, + unified(filename+" (original)", filename+" (fixed)", original, fixed), + diff) + } + } + } + + case "skip": + config := fmt.Sprintf("GOOS=%s GOARCH=%s", runtime.GOOS, runtime.GOARCH) + for _, word := range strings.Fields(rest) { + if strings.Contains(config, word) { + t.Skip(word) + } + } + + case "exit": + if lastExitCode == noExitCode { + t.Fatalf("%s: no prior 'checker' command", prefix) + } + var want int + if _, err := fmt.Sscanf(rest, "%d", &want); err != nil { + t.Fatalf("%s: requires one numeric operand", prefix) + } + if want != lastExitCode { + // plan9 ExitCode() currently only returns 0 for success or 1 for failure + if !(runtime.GOOS == "plan9" && want != exitCodeSuccess && lastExitCode != exitCodeSuccess) { + t.Errorf("%s: exit code was %d, want %d", prefix, lastExitCode, want) + } + } + + case "stderr": + if lastExitCode == noExitCode { + t.Fatalf("%s: no prior 'checker' command", prefix) + } + if matched, err := regexp.MatchString(rest, lastStderr); err != nil { + t.Fatalf("%s: invalid regexp: %v", prefix, err) + } else if !matched { + t.Errorf("%s: output didn't match pattern %q:\n%s", prefix, rest, lastStderr) + } + + default: + t.Errorf("%s: unknown command", prefix) + } + } + if lastExitCode == noExitCode { + t.Errorf("test script contains no 'checker' command") + } + }) } +} - // No files updated - for name, want := range files { - path := path.Join(dir, "src", name) - contents, err := os.ReadFile(path) - if err != nil { - t.Errorf("error reading %s: %v", path, err) +const badMarker = "[bad marker]" + +// The marker analyzer generates fixes from @marker annotations in the +// source. Each marker is of the form: +// +// @message("pattern", "replacement) +// +// The "message" is used for both the Diagnostic.Message and +// SuggestedFix.Message field. Multiple markers with the same +// message form a single diagnostic and fix with a list of textedits. +// +// The "pattern" is a regular expression that must match on the +// current line (though it may extend beyond if the pattern starts +// with "(?s)"), and whose extent forms the TextEdit.{Pos,End} +// deletion. If the pattern contains one subgroup, its range will be +// used; this allows contextual matching. +// +// The "replacement" is a literal string that forms the +// TextEdit.NewText. +// +// Fixes are applied in the order they are first mentioned in the +// source. 
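As a concrete illustration of the marker notation (the marker name and source text here are hypothetical, but the testdata archives below use the same convention):

    bar := 12 //@ fix("bar", "baz")
    _ = bar   //@ fix("bar", "baz")

Both markers share the message "fix", so they form a single diagnostic whose one suggested fix carries two TextEdits, each replacing the matched "bar" on its line with the literal "baz".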
+var markerAnalyzer = &analysis.Analyzer{ + Name: "marker", + Doc: "doc", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: func(pass *analysis.Pass) (_ any, err error) { + // Errors returned by this analyzer cause the + // checker command to exit non-zero, but that + // may be the expected outcome for other reasons + // (e.g. there were diagnostics). + // + // So, we report these errors out of band by logging + // them with a special badMarker string that the + // TestScript harness looks for, to ensure that the + // test fails in that case. + defer func() { + if err != nil { + log.Printf("%s: %v", badMarker, err) + } + }() + + // Parse all notes in the files. + var keys []string + edits := make(map[string][]analysis.TextEdit) + for _, file := range pass.Files { + tokFile := pass.Fset.File(file.FileStart) + content, err := pass.ReadFile(tokFile.Name()) + if err != nil { + return nil, err + } + notes, err := expect.ExtractGo(pass.Fset, file) + if err != nil { + return nil, err + } + for _, note := range notes { + edit, err := markerEdit(tokFile, content, note) + if err != nil { + return nil, fmt.Errorf("%s: %v", tokFile.Position(note.Pos), err) + } + // Preserve note order as it determines fix order. + if edits[note.Name] == nil { + keys = append(keys, note.Name) + } + edits[note.Name] = append(edits[note.Name], edit) + } } - if got := string(contents); got != want { - t.Errorf("contents of %s file updated. got=%s, want=%s", path, got, want) + + // Report each fix in its own Diagnostic. + for _, key := range keys { + edits := edits[key] + // debugging + if false { + log.Printf("%s: marker: @%s: %+v", pass.Fset.Position(edits[0].Pos), key, edits) + } + pass.Report(analysis.Diagnostic{ + Pos: edits[0].Pos, + End: edits[0].Pos, + Message: key, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: key, + TextEdits: edits, + }}, + }) } - } + return nil, nil + }, } -// TestOther ensures that checker.Run reports conflicts from -// distinct actions correctly. -// This test fork/execs the main function above. -func TestOther(t *testing.T) { - files := map[string]string{ - "other/foo.go": `package other - -func Foo() { - bar := 12 - _ = bar -} +// markerEdit returns the TextEdit denoted by note. +func markerEdit(tokFile *token.File, content []byte, note *expect.Note) (analysis.TextEdit, error) { + if len(note.Args) != 2 { + return analysis.TextEdit{}, fmt.Errorf("got %d args, want @%s(pattern, replacement)", len(note.Args), note.Name) + } -// the end -`, + pattern, ok := note.Args[0].(string) + if !ok { + return analysis.TextEdit{}, fmt.Errorf("got %T for pattern, want string", note.Args[0]) } - dir, cleanup, err := analysistest.WriteFiles(files) + rx, err := regexp.Compile(pattern) if err != nil { - t.Fatalf("Creating test files failed with %s", err) + return analysis.TextEdit{}, fmt.Errorf("invalid pattern regexp: %v", err) } - defer cleanup() - - out := fix(t, dir, "rename,other", exitCodeFailed, "other") - pattern := `.*conflicting edits from other and rename on .*foo.go` - matched, err := regexp.MatchString(pattern, out) - if err != nil { - t.Errorf("error matching pattern %s: %v", pattern, err) - } else if !matched { - t.Errorf("output did not match pattern: %s", pattern) + // Match the pattern against the current line. 
+ lineStart := tokFile.LineStart(tokFile.Position(note.Pos).Line) + lineStartOff := tokFile.Offset(lineStart) + lineEndOff := tokFile.Offset(note.Pos) + matches := rx.FindSubmatchIndex(content[lineStartOff:]) + if len(matches) == 0 { + return analysis.TextEdit{}, fmt.Errorf("no match for regexp %q", rx) } - - // No files updated - for name, want := range files { - path := path.Join(dir, "src", name) - contents, err := os.ReadFile(path) - if err != nil { - t.Errorf("error reading %s: %v", path, err) - } - if got := string(contents); got != want { - t.Errorf("contents of %s file updated. got=%s, want=%s", path, got, want) - } + var start, end int // line-relative offset + switch len(matches) { + case 2: + // no subgroups: return the range of the regexp expression + start, end = matches[0], matches[1] + case 4: + // one subgroup: return its range + start, end = matches[2], matches[3] + default: + return analysis.TextEdit{}, fmt.Errorf("invalid location regexp %q: expect either 0 or 1 subgroups, got %d", rx, len(matches)/2-1) + } + if start > lineEndOff-lineStartOff { + // The start of the match must be between the start of the line and the + // marker position (inclusive). + return analysis.TextEdit{}, fmt.Errorf("no matching range found starting on the current line") } -} -// TestNoEnd tests that a missing SuggestedFix.End position is -// correctly interpreted as if equal to SuggestedFix.Pos (see issue #64199). -func TestNoEnd(t *testing.T) { - files := map[string]string{ - "a/a.go": "package a\n\nfunc F() {}", + replacement, ok := note.Args[1].(string) + if !ok { + return analysis.TextEdit{}, fmt.Errorf("second argument must be pattern, got %T", note.Args[1]) } - dir, cleanup, err := analysistest.WriteFiles(files) - if err != nil { - t.Fatalf("Creating test files failed with %s", err) + + // debugging: show matched portion + if false { + log.Printf("%s: %s: r%q (%q) -> %q", + tokFile.Position(note.Pos), + note.Name, + pattern, + content[lineStartOff+start:lineStartOff+end], + replacement) } - defer cleanup() - fix(t, dir, "noend", exitCodeDiagnostics, "a") + return analysis.TextEdit{ + Pos: lineStart + token.Pos(start), + End: lineStart + token.Pos(end), + NewText: []byte(replacement), + }, nil +} - got, err := os.ReadFile(path.Join(dir, "src/a/a.go")) - if err != nil { - t.Fatal(err) - } - const want = "package a\n\n/*hello*/\nfunc F() {}\n" - if string(got) != want { - t.Errorf("new file contents were <<%s>>, want <<%s>>", got, want) - } +var renameAnalyzer = &analysis.Analyzer{ + Name: "rename", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Doc: "renames symbols named bar to baz", + RunDespiteErrors: true, + Run: func(pass *analysis.Pass) (any, error) { + const ( + from = "bar" + to = "baz" + ) + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{(*ast.Ident)(nil)} + inspect.Preorder(nodeFilter, func(n ast.Node) { + ident := n.(*ast.Ident) + if ident.Name == from { + msg := fmt.Sprintf("renaming %q to %q", from, to) + pass.Report(analysis.Diagnostic{ + Pos: ident.Pos(), + End: ident.End(), + Message: msg, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: msg, + TextEdits: []analysis.TextEdit{{ + Pos: ident.Pos(), + End: ident.End(), + NewText: []byte(to), + }}, + }}, + }) + } + }) + return nil, nil + }, } -func init() { - candidates["noend"] = &analysis.Analyzer{ - Name: "noend", - Doc: "inserts /*hello*/ before first decl", - Run: func(pass *analysis.Pass) (any, error) { - decl := pass.Files[0].Decls[0] - 
pass.Report(analysis.Diagnostic{ - Pos: decl.Pos(), - End: token.NoPos, +var noendAnalyzer = &analysis.Analyzer{ + Name: "noend", + Doc: "inserts /*hello*/ before first decl", + Run: func(pass *analysis.Pass) (any, error) { + decl := pass.Files[0].Decls[0] + pass.Report(analysis.Diagnostic{ + Pos: decl.Pos(), + End: token.NoPos, + Message: "say hello", + SuggestedFixes: []analysis.SuggestedFix{{ Message: "say hello", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "say hello", - TextEdits: []analysis.TextEdit{ - { - Pos: decl.Pos(), - End: token.NoPos, - NewText: []byte("/*hello*/"), - }, - }, + TextEdits: []analysis.TextEdit{{ + Pos: decl.Pos(), + End: token.NoPos, + NewText: []byte("/*hello*/"), }}, - }) - return nil, nil - }, + }}, + }) + return nil, nil + }, +} + +// panics asserts that f() panics with with a value whose printed form matches the regexp want. +func panics(t *testing.T, want string, f func()) { + defer func() { + if x := recover(); x == nil { + t.Errorf("function returned normally, wanted panic") + } else if m, err := regexp.MatchString(want, fmt.Sprint(x)); err != nil { + t.Errorf("panics: invalid regexp %q", want) + } else if !m { + t.Errorf("function panicked with value %q, want match for %q", x, want) + } + }() + f() +} + +// section returns the named archive section, or nil. +func section(ar *txtar.Archive, name string) *txtar.File { + for i, f := range ar.Files { + if f.Name == name { + return &ar.Files[i] + } } + return nil } diff --git a/go/analysis/internal/checker/start_test.go b/go/analysis/internal/checker/start_test.go index af4dc42c85c..618ccd09b93 100644 --- a/go/analysis/internal/checker/start_test.go +++ b/go/analysis/internal/checker/start_test.go @@ -22,6 +22,7 @@ import ( // of the file takes effect. func TestStartFixes(t *testing.T) { testenv.NeedsGoPackages(t) + testenv.RedirectStderr(t) // associated checker.Run output with this test files := map[string]string{ "comment/doc.go": `/* Package comment */ diff --git a/go/analysis/internal/checker/testdata/conflict.txt b/go/analysis/internal/checker/testdata/conflict.txt new file mode 100644 index 00000000000..c4a4b13b9ab --- /dev/null +++ b/go/analysis/internal/checker/testdata/conflict.txt @@ -0,0 +1,30 @@ +# Conflicting edits are legal, so long as they appear in different fixes. +# The driver will apply them in some order, and discard those that conflict. +# +# fix1 appears first, so is applied first; it succeeds. +# fix2 and fix3 conflict with it and are rejected. + +checker -marker -fix example.com/a +exit 1 +stderr applied 1 of 3 fixes; 1 files updated...Re-run + +-- go.mod -- +module example.com + +go 1.22 + +-- a/a.go -- +package a + +func f() { + bar := 12 //@ fix1("\tbar", "baz"), fix2("ar ", "baz"), fix3("bar", "lorem ipsum") + _ = bar //@ fix1(" bar", "baz") +} + +-- want/a/a.go -- +package a + +func f() { + baz := 12 //@ fix1("\tbar", "baz"), fix2("ar ", "baz"), fix3("bar", "lorem ipsum") + _ = baz //@ fix1(" bar", "baz") +} diff --git a/go/analysis/internal/checker/testdata/diff.txt b/go/analysis/internal/checker/testdata/diff.txt new file mode 100644 index 00000000000..5a0c9c2a3b2 --- /dev/null +++ b/go/analysis/internal/checker/testdata/diff.txt @@ -0,0 +1,36 @@ +# Basic test of -diff: ensure that stdout contains a diff, +# and the file system is unchanged. +# +# (Most tests of fixes should use want/* not -diff + stdout +# to avoid dependency on the diff algorithm.) +# +# File slashes assume non-Windows. 
+ +skip GOOS=windows +checker -rename -fix -diff example.com/p +exit 3 +stderr renaming "bar" to "baz" + +-- go.mod -- +module example.com +go 1.22 + +-- p/p.go -- +package p + +var bar int + +-- want/p/p.go -- +package p + +var bar int + +-- stdout -- +--- /TMP/p/p.go (old) ++++ /TMP/p/p.go (new) +@@ -1,4 +1,3 @@ + package p + +-var bar int +- ++var baz int diff --git a/go/analysis/internal/checker/testdata/fixes.txt b/go/analysis/internal/checker/testdata/fixes.txt new file mode 100644 index 00000000000..89f245f9ace --- /dev/null +++ b/go/analysis/internal/checker/testdata/fixes.txt @@ -0,0 +1,59 @@ +# Ensure that fixes are applied correctly, in +# particular when processing duplicate fixes for overlapping packages +# in the same directory ("p", "p [p.test]", "p_test [p.test]"). + +checker -rename -fix example.com/p +exit 3 +stderr renaming "bar" to "baz" + +-- go.mod -- +module example.com +go 1.22 + +-- p/p.go -- +package p + +func Foo() { + bar := 12 + _ = bar +} + +-- p/p_test.go -- +package p + +func InTestFile() { + bar := 13 + _ = bar +} + +-- p/p_x_test.go -- +package p_test + +func Foo() { + bar := 14 + _ = bar +} + +-- want/p/p.go -- +package p + +func Foo() { + baz := 12 + _ = baz +} + +-- want/p/p_test.go -- +package p + +func InTestFile() { + baz := 13 + _ = baz +} + +-- want/p/p_x_test.go -- +package p_test + +func Foo() { + baz := 14 + _ = baz +} diff --git a/go/analysis/internal/checker/testdata/importdup.txt b/go/analysis/internal/checker/testdata/importdup.txt new file mode 100644 index 00000000000..e1783777858 --- /dev/null +++ b/go/analysis/internal/checker/testdata/importdup.txt @@ -0,0 +1,29 @@ +# Test that duplicate imports--and, more generally, duplicate +# identical insertions--are coalesced. + +checker -marker -fix example.com/a +exit 3 + +-- go.mod -- +module example.com +go 1.22 + +-- a/a.go -- +package a + +import ( + _ "errors" + //@ fix1("()//", `"foo"`), fix2("()//", `"foo"`) +) + +func f() {} //@ fix1("()}", "n++"), fix2("()}", "n++") + +-- want/a/a.go -- +package a + +import ( + _ "errors" + "foo" //@ fix1("()//", `"foo"`), fix2("()//", `"foo"`) +) + +func f() { n++ } //@ fix1("()}", "n++"), fix2("()}", "n++") diff --git a/go/analysis/internal/checker/testdata/importdup2.txt b/go/analysis/internal/checker/testdata/importdup2.txt new file mode 100644 index 00000000000..118fdc0184b --- /dev/null +++ b/go/analysis/internal/checker/testdata/importdup2.txt @@ -0,0 +1,60 @@ +# Test of import de-duplication behavior. +# +# In packages a and b, there are three fixes, +# each adding one of two imports, but in different order. +# +# In package a, the fixes are [foo, foo, bar], +# and they are resolved as follows: +# - foo is applied -> [foo] +# - foo is coalesced -> [foo] +# - bar is applied -> [foo bar] +# The result is then formatted to [bar foo]. +# +# In package b, the fixes are [foo, bar, foo]: +# - foo is applied -> [foo] +# - bar is applied -> [foo bar] +# - foo is coalesced -> [foo bar] +# The same result is again formatted to [bar foo]. +# +# In more complex examples, the result +# may be more subtly order-dependent. 
+ +checker -marker -fix example.com/a example.com/b +exit 3 + +-- go.mod -- +module example.com +go 1.22 + +-- a/a.go -- +package a + +import ( + //@ fix1("()//", "\"foo\"\n"), fix2("()//", "\"foo\"\n"), fix3("()//", "\"bar\"\n") +) + +-- want/a/a.go -- +package a + +import ( + "bar" + "foo" + // @ fix1("()//", "\"foo\"\n"), fix2("()//", "\"foo\"\n"), fix3("()//", "\"bar\"\n") +) + +-- b/b.go -- +package b + +import ( + //@ fix1("()//", "\"foo\"\n"), fix2("()//", "\"bar\"\n"), fix3("()//", "\"foo\"\n") +) + +-- want/b/b.go -- +package b + +import ( + "bar" + "foo" + // @ fix1("()//", "\"foo\"\n"), fix2("()//", "\"bar\"\n"), fix3("()//", "\"foo\"\n") +) + diff --git a/go/analysis/internal/checker/testdata/json.txt b/go/analysis/internal/checker/testdata/json.txt new file mode 100644 index 00000000000..8e6091aebbc --- /dev/null +++ b/go/analysis/internal/checker/testdata/json.txt @@ -0,0 +1,42 @@ +# Test basic JSON output. +# +# File slashes assume non-Windows. + +skip GOOS=windows +checker -rename -json example.com/p +exit 0 + +-- go.mod -- +module example.com +go 1.22 + +-- p/p.go -- +package p + +func f(bar int) {} + +-- stdout -- +{ + "example.com/p": { + "rename": [ + { + "posn": "/TMP/p/p.go:3:8", + "message": "renaming \"bar\" to \"baz\"", + "suggested_fixes": [ + { + "message": "renaming \"bar\" to \"baz\"", + "edits": [ + { + "filename": "/TMP/p/p.go", + "start": 18, + "end": 21, + "new": "baz" + } + ] + } + ] + } + ] + } +} + diff --git a/go/analysis/internal/checker/testdata/noend.txt b/go/analysis/internal/checker/testdata/noend.txt new file mode 100644 index 00000000000..2d6be074565 --- /dev/null +++ b/go/analysis/internal/checker/testdata/noend.txt @@ -0,0 +1,21 @@ +# Test that a missing SuggestedFix.End position is correctly +# interpreted as if equal to SuggestedFix.Pos (see issue #64199). + +checker -noend -fix example.com/a +exit 3 +stderr say hello + +-- go.mod -- +module example.com +go 1.22 + +-- a/a.go -- +package a + +func f() {} + +-- want/a/a.go -- +package a + +/*hello*/ +func f() {} diff --git a/go/analysis/internal/checker/testdata/overlap.txt b/go/analysis/internal/checker/testdata/overlap.txt new file mode 100644 index 00000000000..f556ef308b9 --- /dev/null +++ b/go/analysis/internal/checker/testdata/overlap.txt @@ -0,0 +1,34 @@ +# This test exercises an edge case of merging. +# +# Two analyzers generate overlapping fixes for this package: +# - 'rename' changes "bar" to "baz" +# - 'marker' changes "ar" to "baz" +# Historically this used to cause a conflict, but as it happens, +# the new merge algorithm splits the rename fix, since it overlaps +# the marker fix, into two subedits: +# - a deletion of "b" and +# - an edit from "ar" to "baz". +# The deletion is of course nonoverlapping, and the edit, +# by happy chance, is identical to the marker fix, so the two +# are coalesced. +# +# (This is a pretty unlikely situation, but it corresponds +# to a historical test, TestOther, that used to check for +# a conflict, and it seemed wrong to delete it without explanation.) 
+ +checker -rename -marker -fix example.com/a +exit 3 + +-- go.mod -- +module example.com +go 1.22 + +-- a/a.go -- +package a + +func f(bar int) {} //@ fix("ar", "baz") + +-- want/a/a.go -- +package a + +func f(baz int) {} //@ fix("ar", "baz") diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go index b622dfdf3a0..a47ecbae731 100644 --- a/go/analysis/passes/asmdecl/asmdecl.go +++ b/go/analysis/passes/asmdecl/asmdecl.go @@ -542,8 +542,8 @@ func appendComponentsRecursive(arch *asmArch, t types.Type, cc []component, suff elem := tu.Elem() // Calculate offset of each element array. fields := []*types.Var{ - types.NewVar(token.NoPos, nil, "fake0", elem), - types.NewVar(token.NoPos, nil, "fake1", elem), + types.NewField(token.NoPos, nil, "fake0", elem, false), + types.NewField(token.NoPos, nil, "fake1", elem, false), } offsets := arch.sizes.Offsetsof(fields) elemoff := int(offsets[1]) diff --git a/go/analysis/passes/assign/assign.go b/go/analysis/passes/assign/assign.go index 0d95fefcb5a..1413ee13d29 100644 --- a/go/analysis/passes/assign/assign.go +++ b/go/analysis/passes/assign/assign.go @@ -19,6 +19,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -32,7 +33,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ @@ -57,15 +58,17 @@ func run(pass *analysis.Pass) (interface{}, error) { if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) { continue // short-circuit the heavy-weight gofmt check } - le := analysisutil.Format(pass.Fset, lhs) - re := analysisutil.Format(pass.Fset, rhs) + le := analysisinternal.Format(pass.Fset, lhs) + re := analysisinternal.Format(pass.Fset, rhs) if le == re { pass.Report(analysis.Diagnostic{ Pos: stmt.Pos(), Message: fmt.Sprintf("self-assignment of %s to %s", re, le), - SuggestedFixes: []analysis.SuggestedFix{ - {Message: "Remove", TextEdits: []analysis.TextEdit{ - {Pos: stmt.Pos(), End: stmt.End(), NewText: []byte{}}, - }}, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove self-assignment", + TextEdits: []analysis.TextEdit{{ + Pos: stmt.Pos(), + End: stmt.End(), + }}}, }, }) } diff --git a/go/analysis/passes/atomic/atomic.go b/go/analysis/passes/atomic/atomic.go index 931f9ca7540..82d5439ce57 100644 --- a/go/analysis/passes/atomic/atomic.go +++ b/go/analysis/passes/atomic/atomic.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -28,8 +29,8 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "sync/atomic") { +func run(pass *analysis.Pass) (any, error) { + if !analysisinternal.Imports(pass.Pkg, "sync/atomic") { return nil, nil // doesn't directly import sync/atomic } @@ -52,8 +53,8 @@ func run(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if analysisutil.IsFunctionNamed(fn, "sync/atomic", "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr") { + obj := typeutil.Callee(pass.TypesInfo, call) + if 
analysisinternal.IsFunctionNamed(obj, "sync/atomic", "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr") { checkAtomicAddAssignment(pass, n.Lhs[i], call) } } @@ -71,7 +72,7 @@ func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.Call arg := call.Args[0] broken := false - gofmt := func(e ast.Expr) string { return analysisutil.Format(pass.Fset, e) } + gofmt := func(e ast.Expr) string { return analysisinternal.Format(pass.Fset, e) } if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { broken = gofmt(left) == gofmt(uarg.X) diff --git a/go/analysis/passes/atomicalign/atomicalign.go b/go/analysis/passes/atomicalign/atomicalign.go index aff6d25b3e1..2508b41f661 100644 --- a/go/analysis/passes/atomicalign/atomicalign.go +++ b/go/analysis/passes/atomicalign/atomicalign.go @@ -16,9 +16,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) const Doc = "check for non-64-bits-aligned arguments to sync/atomic functions" @@ -31,11 +31,11 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { if 8*pass.TypesSizes.Sizeof(types.Typ[types.Uintptr]) == 64 { return nil, nil // 64-bit platform } - if !analysisutil.Imports(pass.Pkg, "sync/atomic") { + if !analysisinternal.Imports(pass.Pkg, "sync/atomic") { return nil, nil // doesn't directly import sync/atomic } @@ -53,10 +53,10 @@ func run(pass *analysis.Pass) (interface{}, error) { inspect.Preorder(nodeFilter, func(node ast.Node) { call := node.(*ast.CallExpr) - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if analysisutil.IsFunctionNamed(fn, "sync/atomic", funcNames...) { + obj := typeutil.Callee(pass.TypesInfo, call) + if analysisinternal.IsFunctionNamed(obj, "sync/atomic", funcNames...) { // For all the listed functions, the expression to check is always the first function argument. - check64BitAlignment(pass, fn.Name(), call.Args[0]) + check64BitAlignment(pass, obj.Name(), call.Args[0]) } }) diff --git a/go/analysis/passes/bools/bools.go b/go/analysis/passes/bools/bools.go index 8cec6e8224a..e1cf9f9b7ad 100644 --- a/go/analysis/passes/bools/bools.go +++ b/go/analysis/passes/bools/bools.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) const Doc = "check for common mistakes involving boolean operators" @@ -27,7 +28,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ @@ -103,7 +104,7 @@ func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr, seen map[* func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) { seen := make(map[string]bool) for _, e := range exprs { - efmt := analysisutil.Format(pass.Fset, e) + efmt := analysisinternal.Format(pass.Fset, e) if seen[efmt] { pass.ReportRangef(e, "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt) } else { @@ -149,8 +150,8 @@ func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) { } // e is of the form 'x != c' or 'x == c'. 
- xfmt := analysisutil.Format(pass.Fset, x) - efmt := analysisutil.Format(pass.Fset, e) + xfmt := analysisinternal.Format(pass.Fset, x) + efmt := analysisinternal.Format(pass.Fset, e) if prev, found := seen[xfmt]; found { // checkRedundant handles the case in which efmt == prev. if efmt != prev { diff --git a/go/analysis/passes/cgocall/cgocall.go b/go/analysis/passes/cgocall/cgocall.go index 613583a1a64..4f3bb035d65 100644 --- a/go/analysis/passes/cgocall/cgocall.go +++ b/go/analysis/passes/cgocall/cgocall.go @@ -18,7 +18,7 @@ import ( "strconv" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/internal/analysisinternal" ) const debug = false @@ -40,8 +40,8 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "runtime/cgo") { +func run(pass *analysis.Pass) (any, error) { + if !analysisinternal.Imports(pass.Pkg, "runtime/cgo") { return nil, nil // doesn't use cgo } diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go index 03496cb3037..a9f02ac62e6 100644 --- a/go/analysis/passes/copylock/copylock.go +++ b/go/analysis/passes/copylock/copylock.go @@ -15,8 +15,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/versions" ) @@ -86,7 +86,7 @@ func checkCopyLocksAssign(pass *analysis.Pass, assign *ast.AssignStmt, goversion lhs := assign.Lhs for i, x := range assign.Rhs { if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, assign.Lhs[i]), path) + pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisinternal.Format(pass.Fset, assign.Lhs[i]), path) lhs = nil // An lhs has been reported. We prefer the assignment warning and do not report twice. 
} } @@ -100,7 +100,7 @@ func checkCopyLocksAssign(pass *analysis.Pass, assign *ast.AssignStmt, goversion if id, ok := l.(*ast.Ident); ok && id.Name != "_" { if obj := pass.TypesInfo.Defs[id]; obj != nil && obj.Type() != nil { if path := lockPath(pass.Pkg, obj.Type(), nil); path != nil { - pass.ReportRangef(l, "for loop iteration copies lock value to %v: %v", analysisutil.Format(pass.Fset, l), path) + pass.ReportRangef(l, "for loop iteration copies lock value to %v: %v", analysisinternal.Format(pass.Fset, l), path) } } } @@ -132,7 +132,7 @@ func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) { x = node.Value } if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path) + pass.ReportRangef(x, "literal copies lock value from %v: %v", analysisinternal.Format(pass.Fset, x), path) } } } @@ -163,7 +163,7 @@ func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) { } for _, x := range ce.Args { if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path) + pass.ReportRangef(x, "call of %s copies lock value: %v", analysisinternal.Format(pass.Fset, ce.Fun), path) } } } @@ -230,7 +230,7 @@ func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) { return } if path := lockPath(pass.Pkg, typ, nil); path != nil { - pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path) + pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisinternal.Format(pass.Fset, e), path) } } @@ -350,7 +350,7 @@ func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typ // In go1.10, sync.noCopy did not implement Locker. // (The Unlock method was added only in CL 121876.) // TODO(adonovan): remove workaround when we drop go1.10. 
- if analysisutil.IsNamedType(typ, "sync", "noCopy") { + if analysisinternal.IsTypeNamed(typ, "sync", "noCopy") { return []string{typ.String()} } diff --git a/go/analysis/passes/deepequalerrors/deepequalerrors.go b/go/analysis/passes/deepequalerrors/deepequalerrors.go index 70b5e39ecf8..d15e3bc59ba 100644 --- a/go/analysis/passes/deepequalerrors/deepequalerrors.go +++ b/go/analysis/passes/deepequalerrors/deepequalerrors.go @@ -12,9 +12,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) const Doc = `check for calls of reflect.DeepEqual on error values @@ -34,8 +34,8 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "reflect") { +func run(pass *analysis.Pass) (any, error) { + if !analysisinternal.Imports(pass.Pkg, "reflect") { return nil, nil // doesn't directly import reflect } @@ -46,8 +46,8 @@ func run(pass *analysis.Pass) (interface{}, error) { } inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) - fn, _ := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if analysisutil.IsFunctionNamed(fn, "reflect", "DeepEqual") && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) { + obj := typeutil.Callee(pass.TypesInfo, call) + if analysisinternal.IsFunctionNamed(obj, "reflect", "DeepEqual") && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) { pass.ReportRangef(call, "avoid using reflect.DeepEqual with errors") } }) diff --git a/go/analysis/passes/defers/defers.go b/go/analysis/passes/defers/defers.go index 5e8e80a6a77..e11957f2d09 100644 --- a/go/analysis/passes/defers/defers.go +++ b/go/analysis/passes/defers/defers.go @@ -13,6 +13,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -27,15 +28,15 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "time") { +func run(pass *analysis.Pass) (any, error) { + if !analysisinternal.Imports(pass.Pkg, "time") { return nil, nil } checkDeferCall := func(node ast.Node) bool { switch v := node.(type) { case *ast.CallExpr: - if analysisutil.IsFunctionNamed(typeutil.StaticCallee(pass.TypesInfo, v), "time", "Since") { + if analysisinternal.IsFunctionNamed(typeutil.Callee(pass.TypesInfo, v), "time", "Since") { pass.Reportf(v.Pos(), "call to time.Since is not deferred") } case *ast.FuncLit: diff --git a/go/analysis/passes/errorsas/errorsas.go b/go/analysis/passes/errorsas/errorsas.go index 7f62ad4c825..b8d29d019db 100644 --- a/go/analysis/passes/errorsas/errorsas.go +++ b/go/analysis/passes/errorsas/errorsas.go @@ -13,9 +13,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) const Doc = `report passing non-pointer or non-error values to errors.As @@ -31,7 +31,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) 
{ switch pass.Pkg.Path() { case "errors", "errors_test": // These packages know how to use their own APIs. @@ -39,7 +39,7 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } - if !analysisutil.Imports(pass.Pkg, "errors") { + if !analysisinternal.Imports(pass.Pkg, "errors") { return nil, nil // doesn't directly import errors } @@ -50,8 +50,8 @@ func run(pass *analysis.Pass) (interface{}, error) { } inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if !analysisutil.IsFunctionNamed(fn, "errors", "As") { + obj := typeutil.Callee(pass.TypesInfo, call) + if !analysisinternal.IsFunctionNamed(obj, "errors", "As") { return } if len(call.Args) < 2 { diff --git a/go/analysis/passes/framepointer/framepointer.go b/go/analysis/passes/framepointer/framepointer.go index 6eff3a20fea..8012de99daa 100644 --- a/go/analysis/passes/framepointer/framepointer.go +++ b/go/analysis/passes/framepointer/framepointer.go @@ -10,6 +10,7 @@ import ( "go/build" "regexp" "strings" + "unicode" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" @@ -24,15 +25,97 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -var ( - re = regexp.MustCompile - asmWriteBP = re(`,\s*BP$`) // TODO: can have false positive, e.g. for TESTQ BP,BP. Seems unlikely. - asmMentionBP = re(`\bBP\b`) - asmControlFlow = re(`^(J|RET)`) -) +// Per-architecture checks for instructions. +// Assume comments, leading and trailing spaces are removed. +type arch struct { + isFPWrite func(string) bool + isFPRead func(string) bool + isBranch func(string) bool +} + +var re = regexp.MustCompile + +func hasAnyPrefix(s string, prefixes ...string) bool { + for _, p := range prefixes { + if strings.HasPrefix(s, p) { + return true + } + } + return false +} + +var arches = map[string]arch{ + "amd64": { + isFPWrite: re(`,\s*BP$`).MatchString, // TODO: can have false positive, e.g. for TESTQ BP,BP. Seems unlikely. + isFPRead: re(`\bBP\b`).MatchString, + isBranch: func(s string) bool { + return hasAnyPrefix(s, "J", "RET") + }, + }, + "arm64": { + isFPWrite: func(s string) bool { + if i := strings.LastIndex(s, ","); i > 0 && strings.HasSuffix(s[i:], "R29") { + return true + } + if hasAnyPrefix(s, "LDP", "LDAXP", "LDXP", "CASP") { + // Instructions which write to a pair of registers, e.g. + // LDP 8(R0), (R26, R29) + // CASPD (R2, R3), (R2), (R26, R29) + lp := strings.LastIndex(s, "(") + rp := strings.LastIndex(s, ")") + if lp > -1 && lp < rp { + return strings.Contains(s[lp:rp], ",") && strings.Contains(s[lp:rp], "R29") + } + } + return false + }, + isFPRead: re(`\bR29\b`).MatchString, + isBranch: func(s string) bool { + // Get just the instruction + if i := strings.IndexFunc(s, unicode.IsSpace); i > 0 { + s = s[:i] + } + return arm64Branch[s] + }, + }, +} + +// arm64 has many control flow instructions. +// ^(B|RET) isn't sufficient or correct (e.g. BIC, BFI aren't control flow.) +// It's easier to explicitly enumerate them in a map than to write a regex. 
+// Borrowed from Go tree, cmd/asm/internal/arch/arm64.go +var arm64Branch = map[string]bool{ + "B": true, + "BL": true, + "BEQ": true, + "BNE": true, + "BCS": true, + "BHS": true, + "BCC": true, + "BLO": true, + "BMI": true, + "BPL": true, + "BVS": true, + "BVC": true, + "BHI": true, + "BLS": true, + "BGE": true, + "BLT": true, + "BGT": true, + "BLE": true, + "CBZ": true, + "CBZW": true, + "CBNZ": true, + "CBNZW": true, + "JMP": true, + "TBNZ": true, + "TBZ": true, + "RET": true, +} func run(pass *analysis.Pass) (interface{}, error) { - if build.Default.GOARCH != "amd64" { // TODO: arm64 also? + arch, ok := arches[build.Default.GOARCH] + if !ok { return nil, nil } if build.Default.GOOS != "linux" && build.Default.GOOS != "darwin" { @@ -63,6 +146,9 @@ func run(pass *analysis.Pass) (interface{}, error) { line = line[:i] } line = strings.TrimSpace(line) + if line == "" { + continue + } // We start checking code at a TEXT line for a frameless function. if strings.HasPrefix(line, "TEXT") && strings.Contains(line, "(SB)") && strings.Contains(line, "$0") { @@ -73,16 +159,12 @@ func run(pass *analysis.Pass) (interface{}, error) { continue } - if asmWriteBP.MatchString(line) { // clobber of BP, function is not OK + if arch.isFPWrite(line) { pass.Reportf(analysisutil.LineStart(tf, lineno), "frame pointer is clobbered before saving") active = false continue } - if asmMentionBP.MatchString(line) { // any other use of BP might be a read, so function is OK - active = false - continue - } - if asmControlFlow.MatchString(line) { // give up after any branch instruction + if arch.isFPRead(line) || arch.isBranch(line) { active = false continue } diff --git a/go/analysis/passes/framepointer/testdata/src/a/asm_arm64.s b/go/analysis/passes/framepointer/testdata/src/a/asm_arm64.s new file mode 100644 index 00000000000..f2be7bdb9e9 --- /dev/null +++ b/go/analysis/passes/framepointer/testdata/src/a/asm_arm64.s @@ -0,0 +1,42 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +TEXT ·bad1(SB), 0, $0 + MOVD $0, R29 // want `frame pointer is clobbered before saving` + RET +TEXT ·bad2(SB), 0, $0 + MOVD R1, R29 // want `frame pointer is clobbered before saving` + RET +TEXT ·bad3(SB), 0, $0 + MOVD 6(R2), R29 // want `frame pointer is clobbered before saving` + RET +TEXT ·bad4(SB), 0, $0 + LDP 0(R1), (R26, R29) // want `frame pointer is clobbered before saving` + RET +TEXT ·bad5(SB), 0, $0 + AND $0x1, R3, R29 // want `frame pointer is clobbered before saving` + RET +TEXT ·good1(SB), 0, $0 + STPW (R29, R30), -32(RSP) + MOVD $0, R29 // this is ok + LDPW 32(RSP), (R29, R30) + RET +TEXT ·good2(SB), 0, $0 + MOVD R29, R1 + MOVD $0, R29 // this is ok + MOVD R1, R29 + RET +TEXT ·good3(SB), 0, $0 + CMP R1, R2 + BEQ skip + MOVD $0, R29 // this is ok +skip: + RET +TEXT ·good4(SB), 0, $0 + RET + MOVD $0, R29 // this is ok + RET +TEXT ·good5(SB), 0, $8 + MOVD $0, R29 // this is ok + RET diff --git a/go/analysis/passes/httpmux/httpmux.go b/go/analysis/passes/httpmux/httpmux.go index 78748c5c12e..58d3ed5daca 100644 --- a/go/analysis/passes/httpmux/httpmux.go +++ b/go/analysis/passes/httpmux/httpmux.go @@ -14,9 +14,9 @@ import ( "golang.org/x/mod/semver" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" ) @@ -45,7 +45,7 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } } - if !analysisutil.Imports(pass.Pkg, "net/http") { + if !analysisinternal.Imports(pass.Pkg, "net/http") { return nil, nil } // Look for calls to ServeMux.Handle or ServeMux.HandleFunc. @@ -78,7 +78,7 @@ func isServeMuxRegisterCall(pass *analysis.Pass, call *ast.CallExpr) bool { if fn == nil { return false } - if analysisutil.IsFunctionNamed(fn, "net/http", "Handle", "HandleFunc") { + if analysisinternal.IsFunctionNamed(fn, "net/http", "Handle", "HandleFunc") { return true } if !isMethodNamed(fn, "net/http", "Handle", "HandleFunc") { @@ -86,11 +86,13 @@ func isServeMuxRegisterCall(pass *analysis.Pass, call *ast.CallExpr) bool { } recv := fn.Type().(*types.Signature).Recv() // isMethodNamed() -> non-nil isPtr, named := typesinternal.ReceiverNamed(recv) - return isPtr && analysisutil.IsNamedType(named, "net/http", "ServeMux") + return isPtr && analysisinternal.IsTypeNamed(named, "net/http", "ServeMux") } // isMethodNamed reports when a function f is a method, // in a package with the path pkgPath and the name of f is in names. +// +// (Unlike [analysisinternal.IsMethodNamed], it ignores the receiver type name.) 
func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f == nil { return false diff --git a/go/analysis/passes/httpresponse/httpresponse.go b/go/analysis/passes/httpresponse/httpresponse.go index 91ebe29de11..e9acd96547e 100644 --- a/go/analysis/passes/httpresponse/httpresponse.go +++ b/go/analysis/passes/httpresponse/httpresponse.go @@ -12,8 +12,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" ) @@ -41,12 +41,12 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) // Fast path: if the package doesn't import net/http, // skip the traversal. - if !analysisutil.Imports(pass.Pkg, "net/http") { + if !analysisinternal.Imports(pass.Pkg, "net/http") { return nil, nil } @@ -118,7 +118,7 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { return false // the function called does not return two values. } isPtr, named := typesinternal.ReceiverNamed(res.At(0)) - if !isPtr || named == nil || !analysisutil.IsNamedType(named, "net/http", "Response") { + if !isPtr || named == nil || !analysisinternal.IsTypeNamed(named, "net/http", "Response") { return false // the first return type is not *http.Response. } @@ -133,11 +133,11 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { return ok && id.Name == "http" // function in net/http package. } - if analysisutil.IsNamedType(typ, "net/http", "Client") { + if analysisinternal.IsTypeNamed(typ, "net/http", "Client") { return true // method on http.Client. } ptr, ok := types.Unalias(typ).(*types.Pointer) - return ok && analysisutil.IsNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client. + return ok && analysisinternal.IsTypeNamed(ptr.Elem(), "net/http", "Client") // method on *http.Client. } // restOfBlock, given a traversal stack, finds the innermost containing diff --git a/go/analysis/passes/internal/analysisutil/util.go b/go/analysis/passes/internal/analysisutil/util.go index a4fa8d31c4e..d3df898d301 100644 --- a/go/analysis/passes/internal/analysisutil/util.go +++ b/go/analysis/passes/internal/analysisutil/util.go @@ -7,9 +7,7 @@ package analysisutil import ( - "bytes" "go/ast" - "go/printer" "go/token" "go/types" "os" @@ -18,13 +16,6 @@ import ( "golang.org/x/tools/internal/analysisinternal" ) -// Format returns a string representation of the expression. -func Format(fset *token.FileSet, x ast.Expr) string { - var b bytes.Buffer - printer.Fprint(&b, fset, x) - return b.String() -} - // HasSideEffects reports whether evaluation of e has side effects. func HasSideEffects(info *types.Info, e ast.Expr) bool { safe := true @@ -105,57 +96,4 @@ func LineStart(f *token.File, line int) token.Pos { } } -// Imports returns true if path is imported by pkg. -func Imports(pkg *types.Package, path string) bool { - for _, imp := range pkg.Imports() { - if imp.Path() == path { - return true - } - } - return false -} - -// IsNamedType reports whether t is the named type with the given package path -// and one of the given names. -// This function avoids allocating the concatenation of "pkg.Name", -// which is important for the performance of syntax matching. 
-func IsNamedType(t types.Type, pkgPath string, names ...string) bool { - n, ok := types.Unalias(t).(*types.Named) - if !ok { - return false - } - obj := n.Obj() - if obj == nil || obj.Pkg() == nil || obj.Pkg().Path() != pkgPath { - return false - } - name := obj.Name() - for _, n := range names { - if name == n { - return true - } - } - return false -} - -// IsFunctionNamed reports whether f is a top-level function defined in the -// given package and has one of the given names. -// It returns false if f is nil or a method. -func IsFunctionNamed(f *types.Func, pkgPath string, names ...string) bool { - if f == nil { - return false - } - if f.Pkg() == nil || f.Pkg().Path() != pkgPath { - return false - } - if f.Type().(*types.Signature).Recv() != nil { - return false - } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false -} - var MustExtractDoc = analysisinternal.MustExtractDoc diff --git a/go/analysis/passes/loopclosure/loopclosure.go b/go/analysis/passes/loopclosure/loopclosure.go index fe05eda44e4..d3181242153 100644 --- a/go/analysis/passes/loopclosure/loopclosure.go +++ b/go/analysis/passes/loopclosure/loopclosure.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/internal/versions" ) @@ -368,5 +369,5 @@ func isMethodCall(info *types.Info, expr ast.Expr, pkgPath, typeName, method str // Check that the receiver is a . or // *.. _, named := typesinternal.ReceiverNamed(recv) - return analysisutil.IsNamedType(named, pkgPath, typeName) + return analysisinternal.IsTypeNamed(named, pkgPath, typeName) } diff --git a/go/analysis/passes/lostcancel/lostcancel.go b/go/analysis/passes/lostcancel/lostcancel.go index 26fdc1206f8..f8a661aa5db 100644 --- a/go/analysis/passes/lostcancel/lostcancel.go +++ b/go/analysis/passes/lostcancel/lostcancel.go @@ -16,6 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/cfg" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -48,7 +49,7 @@ var contextPackage = "context" // checkLostCancel analyzes a single named or literal function. func run(pass *analysis.Pass) (interface{}, error) { // Fast path: bypass check if file doesn't use context.WithCancel. - if !analysisutil.Imports(pass.Pkg, contextPackage) { + if !analysisinternal.Imports(pass.Pkg, contextPackage) { return nil, nil } diff --git a/go/analysis/passes/pkgfact/pkgfact.go b/go/analysis/passes/pkgfact/pkgfact.go index 4bf33d45f50..077c8780815 100644 --- a/go/analysis/passes/pkgfact/pkgfact.go +++ b/go/analysis/passes/pkgfact/pkgfact.go @@ -45,7 +45,7 @@ var Analyzer = &analysis.Analyzer{ } // A pairsFact is a package-level fact that records -// an set of key=value strings accumulated from constant +// a set of key=value strings accumulated from constant // declarations in this package and its dependencies. // Elements are ordered by keys, which are unique. 
type pairsFact []string diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go index 171ad201372..81600a283aa 100644 --- a/go/analysis/passes/printf/printf.go +++ b/go/analysis/passes/printf/printf.go @@ -5,7 +5,6 @@ package printf import ( - "bytes" _ "embed" "fmt" "go/ast" @@ -15,16 +14,17 @@ import ( "reflect" "regexp" "sort" - "strconv" "strings" - "unicode/utf8" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/fmtstr" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) func init() { @@ -108,12 +108,12 @@ func (f *isWrapper) String() string { } } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { res := &Result{ funcs: make(map[*types.Func]Kind), } findPrintfLike(pass, res) - checkCall(pass) + checkCalls(pass) return res, nil } @@ -182,7 +182,7 @@ func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper { } // findPrintfLike scans the entire package to find printf-like functions. -func findPrintfLike(pass *analysis.Pass, res *Result) (interface{}, error) { +func findPrintfLike(pass *analysis.Pass, res *Result) (any, error) { // Gather potential wrappers and call graph between them. byObj := make(map[*types.Func]*printfWrapper) var wrappers []*printfWrapper @@ -409,20 +409,29 @@ func stringConstantExpr(pass *analysis.Pass, expr ast.Expr) (string, bool) { return "", false } -// checkCall triggers the print-specific checks if the call invokes a print function. -func checkCall(pass *analysis.Pass) { +// checkCalls triggers the print-specific checks for calls that invoke a print +// function. +func checkCalls(pass *analysis.Pass) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ + (*ast.File)(nil), (*ast.CallExpr)(nil), } + + var fileVersion string // for selectively suppressing checks; "" if unknown. inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn, kind := printfNameAndKind(pass, call) - switch kind { - case KindPrintf, KindErrorf: - checkPrintf(pass, kind, call, fn) - case KindPrint: - checkPrint(pass, call, fn) + switch n := n.(type) { + case *ast.File: + fileVersion = versions.Lang(versions.FileVersion(pass.TypesInfo, n)) + + case *ast.CallExpr: + fn, kind := printfNameAndKind(pass, n) + switch kind { + case KindPrintf, KindErrorf: + checkPrintf(pass, fileVersion, kind, n, fn.FullName()) + case KindPrint: + checkPrint(pass, n, fn.FullName()) + } } }) } @@ -480,30 +489,12 @@ func isFormatter(typ types.Type) bool { sig := fn.Type().(*types.Signature) return sig.Params().Len() == 2 && sig.Results().Len() == 0 && - analysisutil.IsNamedType(sig.Params().At(0).Type(), "fmt", "State") && + analysisinternal.IsTypeNamed(sig.Params().At(0).Type(), "fmt", "State") && types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune]) } -// formatState holds the parsed representation of a printf directive such as "%3.*[4]d". -// It is constructed by parsePrintfVerb. -type formatState struct { - verb rune // the format verb: 'd' for "%d" - format string // the full format directive from % through verb, "%.3d". - name string // Printf, Sprintf etc. - flags []byte // the list of # + etc. 
- argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call - firstArg int // Index of first argument after the format in the Printf call. - // Used only during parse. - pass *analysis.Pass - call *ast.CallExpr - argNum int // Which argument we're expecting to format now. - hasIndex bool // Whether the argument is indexed. - indexPending bool // Whether we have an indexed argument that has not resolved. - nbytes int // number of bytes of the format string consumed. -} - // checkPrintf checks a call to a formatted print routine such as Printf. -func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.Func) { +func checkPrintf(pass *analysis.Pass, fileVersion string, kind Kind, call *ast.CallExpr, name string) { idx := formatStringIndex(pass, call) if idx < 0 || idx >= len(call.Args) { return @@ -517,12 +508,22 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F // non-constant format string and no arguments: // if msg contains "%", misformatting occurs. // Report the problem and suggest a fix: fmt.Printf("%s", msg). - if !suppressNonconstants && idx == len(call.Args)-1 { + // + // However, as described in golang/go#71485, this analysis can produce a + // significant number of diagnostics in existing code, and the bugs it + // finds are sometimes unlikely or inconsequential, and may not be worth + // fixing for some users. Gating on language version allows us to avoid + // breaking existing tests and CI scripts. + if !suppressNonconstants && + idx == len(call.Args)-1 && + fileVersion != "" && // fail open + versions.AtLeast(fileVersion, "go1.24") { + pass.Report(analysis.Diagnostic{ Pos: formatArg.Pos(), End: formatArg.End(), Message: fmt.Sprintf("non-constant format string in call to %s", - fn.FullName()), + name), SuggestedFixes: []analysis.SuggestedFix{{ Message: `Insert "%s" format string`, TextEdits: []analysis.TextEdit{{ @@ -539,49 +540,46 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F firstArg := idx + 1 // Arguments are immediately after format string. if !strings.Contains(format, "%") { if len(call.Args) > firstArg { - pass.Reportf(call.Lparen, "%s call has arguments but no formatting directives", fn.FullName()) + pass.Reportf(call.Lparen, "%s call has arguments but no formatting directives", name) } return } - // Hard part: check formats against args. - argNum := firstArg - maxArgNum := firstArg + + // Pass the string constant value so + // fmt.Sprintf("%"+("s"), "hi", 3) can be reported as + // "fmt.Sprintf call needs 1 arg but has 2 args". + operations, err := fmtstr.Parse(format, idx) + if err != nil { + // All error messages are in predicate form ("call has a problem") + // so that they may be affixed into a subject ("log.Printf "). + pass.ReportRangef(call.Args[idx], "%s %s", name, err) + return + } + + // index of the highest used index. + maxArgIndex := firstArg - 1 anyIndex := false - for i, w := 0, 0; i < len(format); i += w { - w = 1 - if format[i] != '%' { - continue - } - state := parsePrintfVerb(pass, call, fn.FullName(), format[i:], firstArg, argNum) - if state == nil { - return + // Check formats against args. + for _, operation := range operations { + if operation.Prec.Index != -1 || + operation.Width.Index != -1 || + operation.Verb.Index != -1 { + anyIndex = true } - w = len(state.format) - if !okPrintfArg(pass, call, state) { // One error per format is enough. 
+ if !okPrintfArg(pass, call, &maxArgIndex, firstArg, name, operation) { + // One error per format is enough. return } - if state.hasIndex { - anyIndex = true - } - if state.verb == 'w' { + if operation.Verb.Verb == 'w' { switch kind { case KindNone, KindPrint, KindPrintf: - pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", state.name) + pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", name) return } } - if len(state.argNums) > 0 { - // Continue with the next sequential argument. - argNum = state.argNums[len(state.argNums)-1] + 1 - } - for _, n := range state.argNums { - if n >= maxArgNum { - maxArgNum = n + 1 - } - } } // Dotdotdot is hard. - if call.Ellipsis.IsValid() && maxArgNum >= len(call.Args)-1 { + if call.Ellipsis.IsValid() && maxArgIndex >= len(call.Args)-2 { return } // If any formats are indexed, extra arguments are ignored. @@ -589,145 +587,11 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F return } // There should be no leftover arguments. - if maxArgNum != len(call.Args) { - expect := maxArgNum - firstArg + if maxArgIndex+1 < len(call.Args) { + expect := maxArgIndex + 1 - firstArg numArgs := len(call.Args) - firstArg - pass.ReportRangef(call, "%s call needs %v but has %v", fn.FullName(), count(expect, "arg"), count(numArgs, "arg")) - } -} - -// parseFlags accepts any printf flags. -func (s *formatState) parseFlags() { - for s.nbytes < len(s.format) { - switch c := s.format[s.nbytes]; c { - case '#', '0', '+', '-', ' ': - s.flags = append(s.flags, c) - s.nbytes++ - default: - return - } - } -} - -// scanNum advances through a decimal number if present. -func (s *formatState) scanNum() { - for ; s.nbytes < len(s.format); s.nbytes++ { - c := s.format[s.nbytes] - if c < '0' || '9' < c { - return - } - } -} - -// parseIndex scans an index expression. It returns false if there is a syntax error. -func (s *formatState) parseIndex() bool { - if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' { - return true - } - // Argument index present. - s.nbytes++ // skip '[' - start := s.nbytes - s.scanNum() - ok := true - if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' { - ok = false // syntax error is either missing "]" or invalid index. - s.nbytes = strings.Index(s.format[start:], "]") - if s.nbytes < 0 { - s.pass.ReportRangef(s.call, "%s format %s is missing closing ]", s.name, s.format) - return false - } - s.nbytes = s.nbytes + start + pass.ReportRangef(call, "%s call needs %v but has %v", name, count(expect, "arg"), count(numArgs, "arg")) } - arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32) - if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) { - s.pass.ReportRangef(s.call, "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes]) - return false - } - s.nbytes++ // skip ']' - arg := int(arg32) - arg += s.firstArg - 1 // We want to zero-index the actual arguments. - s.argNum = arg - s.hasIndex = true - s.indexPending = true - return true -} - -// parseNum scans a width or precision (or *). It returns false if there's a bad index expression. -func (s *formatState) parseNum() bool { - if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' { - if s.indexPending { // Absorb it. - s.indexPending = false - } - s.nbytes++ - s.argNums = append(s.argNums, s.argNum) - s.argNum++ - } else { - s.scanNum() - } - return true -} - -// parsePrecision scans for a precision. 
It returns false if there's a bad index expression. -func (s *formatState) parsePrecision() bool { - // If there's a period, there may be a precision. - if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' { - s.flags = append(s.flags, '.') // Treat precision as a flag. - s.nbytes++ - if !s.parseIndex() { - return false - } - if !s.parseNum() { - return false - } - } - return true -} - -// parsePrintfVerb looks the formatting directive that begins the format string -// and returns a formatState that encodes what the directive wants, without looking -// at the actual arguments present in the call. The result is nil if there is an error. -func parsePrintfVerb(pass *analysis.Pass, call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState { - state := &formatState{ - format: format, - name: name, - flags: make([]byte, 0, 5), - argNum: argNum, - argNums: make([]int, 0, 1), - nbytes: 1, // There's guaranteed to be a percent sign. - firstArg: firstArg, - pass: pass, - call: call, - } - // There may be flags. - state.parseFlags() - // There may be an index. - if !state.parseIndex() { - return nil - } - // There may be a width. - if !state.parseNum() { - return nil - } - // There may be a precision. - if !state.parsePrecision() { - return nil - } - // Now a verb, possibly prefixed by an index (which we may already have). - if !state.indexPending && !state.parseIndex() { - return nil - } - if state.nbytes == len(state.format) { - pass.ReportRangef(call.Fun, "%s format %s is missing verb at end of string", name, state.format) - return nil - } - verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:]) - state.verb = verb - state.nbytes += w - if verb != '%' { - state.argNums = append(state.argNums, state.argNum) - } - state.format = state.format[:state.nbytes] - return state } // printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask. @@ -790,79 +654,96 @@ var printVerbs = []printVerb{ {'X', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex}, } -// okPrintfArg compares the formatState to the arguments actually present, -// reporting any discrepancies it can discern. If the final argument is ellipsissed, -// there's little it can do for that. -func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (ok bool) { +// okPrintfArg compares the operation to the arguments actually present, +// reporting any discrepancies it can discern, maxArgIndex was the index of the highest used index. +// If the final argument is ellipsissed, there's little it can do for that. +func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, maxArgIndex *int, firstArg int, name string, operation *fmtstr.Operation) (ok bool) { + verb := operation.Verb.Verb var v printVerb found := false // Linear scan is fast enough for a small list. for _, v = range printVerbs { - if v.verb == state.verb { + if v.verb == verb { found = true break } } - // Could current arg implement fmt.Formatter? + // Could verb's arg implement fmt.Formatter? // Skip check for the %w verb, which requires an error. 
formatter := false - if v.typ != argError && state.argNum < len(call.Args) { - if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok { + if v.typ != argError && operation.Verb.ArgIndex < len(call.Args) { + if tv, ok := pass.TypesInfo.Types[call.Args[operation.Verb.ArgIndex]]; ok { formatter = isFormatter(tv.Type) } } if !formatter { if !found { - pass.ReportRangef(call, "%s format %s has unknown verb %c", state.name, state.format, state.verb) + pass.ReportRangef(call, "%s format %s has unknown verb %c", name, operation.Text, verb) return false } - for _, flag := range state.flags { + for _, flag := range operation.Flags { // TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11. // See issues 23598 and 23605. if flag == '0' { continue } if !strings.ContainsRune(v.flags, rune(flag)) { - pass.ReportRangef(call, "%s format %s has unrecognized flag %c", state.name, state.format, flag) + pass.ReportRangef(call, "%s format %s has unrecognized flag %c", name, operation.Text, flag) return false } } } - // Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all - // but the final arg must be an integer. - trueArgs := 1 - if state.verb == '%' { - trueArgs = 0 + + var argIndexes []int + // First check for *. + if operation.Width.Dynamic != -1 { + argIndexes = append(argIndexes, operation.Width.Dynamic) + } + if operation.Prec.Dynamic != -1 { + argIndexes = append(argIndexes, operation.Prec.Dynamic) } - nargs := len(state.argNums) - for i := 0; i < nargs-trueArgs; i++ { - argNum := state.argNums[i] - if !argCanBeChecked(pass, call, i, state) { + // If len(argIndexes)>0, we have something like %.*s and all + // indexes in argIndexes must be an integer. + for _, argIndex := range argIndexes { + if !argCanBeChecked(pass, call, argIndex, firstArg, operation, name) { return } - arg := call.Args[argNum] + arg := call.Args[argIndex] if reason, ok := matchArgType(pass, argInt, arg); !ok { details := "" if reason != "" { details = " (" + reason + ")" } - pass.ReportRangef(call, "%s format %s uses non-int %s%s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg), details) + pass.ReportRangef(call, "%s format %s uses non-int %s%s as argument of *", name, operation.Text, analysisinternal.Format(pass.Fset, arg), details) return false } } - if state.verb == '%' || formatter { + // Collect to update maxArgNum in one loop. + if operation.Verb.ArgIndex != -1 && verb != '%' { + argIndexes = append(argIndexes, operation.Verb.ArgIndex) + } + for _, index := range argIndexes { + *maxArgIndex = max(*maxArgIndex, index) + } + + // Special case for '%', go will print "fmt.Printf("%10.2%%dhello", 4)" + // as "%4hello", discard any runes between the two '%'s, and treat the verb '%' + // as an ordinary rune, so early return to skip the type check. + if verb == '%' || formatter { return true } - argNum := state.argNums[len(state.argNums)-1] - if !argCanBeChecked(pass, call, len(state.argNums)-1, state) { + + // Now check verb's type. 
+ verbArgIndex := operation.Verb.ArgIndex + if !argCanBeChecked(pass, call, verbArgIndex, firstArg, operation, name) { return false } - arg := call.Args[argNum] - if isFunctionValue(pass, arg) && state.verb != 'p' && state.verb != 'T' { - pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg)) + arg := call.Args[verbArgIndex] + if isFunctionValue(pass, arg) && verb != 'p' && verb != 'T' { + pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", name, operation.Text, analysisinternal.Format(pass.Fset, arg)) return false } if reason, ok := matchArgType(pass, v.typ, arg); !ok { @@ -874,12 +755,12 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o if reason != "" { details = " (" + reason + ")" } - pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s%s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString, details) + pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s%s", name, operation.Text, analysisinternal.Format(pass.Fset, arg), typeString, details) return false } - if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) { + if v.typ&argString != 0 && v.verb != 'T' && !strings.Contains(operation.Flags, "#") { if methodName, ok := recursiveStringer(pass, arg); ok { - pass.ReportRangef(call, "%s format %s with arg %s causes recursive %s method call", state.name, state.format, analysisutil.Format(pass.Fset, arg), methodName) + pass.ReportRangef(call, "%s format %s with arg %s causes recursive %s method call", name, operation.Text, analysisinternal.Format(pass.Fset, arg), methodName) return false } } @@ -963,25 +844,24 @@ func isFunctionValue(pass *analysis.Pass, e ast.Expr) bool { // argCanBeChecked reports whether the specified argument is statically present; // it may be beyond the list of arguments or in a terminal slice... argument, which // means we can't see it. -func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, formatArg int, state *formatState) bool { - argNum := state.argNums[formatArg] - if argNum <= 0 { +func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, argIndex, firstArg int, operation *fmtstr.Operation, name string) bool { + if argIndex <= 0 { // Shouldn't happen, so catch it with prejudice. - panic("negative arg num") + panic("negative argIndex") } - if argNum < len(call.Args)-1 { + if argIndex < len(call.Args)-1 { return true // Always OK. } if call.Ellipsis.IsValid() { return false // We just can't tell; there could be many more arguments. } - if argNum < len(call.Args) { + if argIndex < len(call.Args) { return true } // There are bad indexes in the format or there are fewer arguments than the format needs. // This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi". - arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed. - pass.ReportRangef(call, "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg")) + arg := argIndex - firstArg + 1 // People think of arguments as 1-indexed. + pass.ReportRangef(call, "%s format %s reads arg #%d, but call has %v", name, operation.Text, arg, count(len(call.Args)-firstArg, "arg")) return false } @@ -998,7 +878,7 @@ const ( ) // checkPrint checks a call to an unformatted print routine such as Println. 
-func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { +func checkPrint(pass *analysis.Pass, call *ast.CallExpr, name string) { firstArg := 0 typ := pass.TypesInfo.Types[call.Fun].Type if typ == nil { @@ -1032,7 +912,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { if sel, ok := call.Args[0].(*ast.SelectorExpr); ok { if x, ok := sel.X.(*ast.Ident); ok { if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") { - pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", fn.FullName(), analysisutil.Format(pass.Fset, call.Args[0])) + pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", name, analysisinternal.Format(pass.Fset, call.Args[0])) } } } @@ -1046,25 +926,25 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { if strings.Contains(s, "%") { m := printFormatRE.FindStringSubmatch(s) if m != nil { - pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", fn.FullName(), m[0]) + pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", name, m[0]) } } } - if strings.HasSuffix(fn.Name(), "ln") { + if strings.HasSuffix(name, "ln") { // The last item, if a string, should not have a newline. arg = args[len(args)-1] if s, ok := stringConstantExpr(pass, arg); ok { if strings.HasSuffix(s, "\n") { - pass.ReportRangef(call, "%s arg list ends with redundant newline", fn.FullName()) + pass.ReportRangef(call, "%s arg list ends with redundant newline", name) } } } for _, arg := range args { if isFunctionValue(pass, arg) { - pass.ReportRangef(call, "%s arg %s is a func value, not called", fn.FullName(), analysisutil.Format(pass.Fset, arg)) + pass.ReportRangef(call, "%s arg %s is a func value, not called", name, analysisinternal.Format(pass.Fset, arg)) } if methodName, ok := recursiveStringer(pass, arg); ok { - pass.ReportRangef(call, "%s arg %s causes recursive call to %s method", fn.FullName(), analysisutil.Format(pass.Fset, arg), methodName) + pass.ReportRangef(call, "%s arg %s causes recursive call to %s method", name, analysisinternal.Format(pass.Fset, arg), methodName) } } } diff --git a/go/analysis/passes/printf/printf_test.go b/go/analysis/passes/printf/printf_test.go index 198cf6ec549..1ce9c28c103 100644 --- a/go/analysis/passes/printf/printf_test.go +++ b/go/analysis/passes/printf/printf_test.go @@ -5,10 +5,13 @@ package printf_test import ( + "path/filepath" "testing" "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/passes/printf" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/testfiles" ) func Test(t *testing.T) { @@ -16,6 +19,19 @@ func Test(t *testing.T) { printf.Analyzer.Flags.Set("funcs", "Warn,Warnf") analysistest.Run(t, testdata, printf.Analyzer, - "a", "b", "nofmt", "typeparams", "issue68744", "issue70572") - analysistest.RunWithSuggestedFixes(t, testdata, printf.Analyzer, "fix") + "a", "b", "nofmt", "nonconst", "typeparams", "issue68744", "issue70572") +} + +func TestNonConstantFmtString_Go123(t *testing.T) { + testenv.NeedsGo1Point(t, 23) + + dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "nonconst_go123.txtar")) + analysistest.RunWithSuggestedFixes(t, dir, printf.Analyzer, "example.com/nonconst") +} + +func TestNonConstantFmtString_Go124(t *testing.T) { + testenv.NeedsGo1Point(t, 24) + + dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "nonconst_go124.txtar")) + analysistest.RunWithSuggestedFixes(t, dir, 
printf.Analyzer, "example.com/nonconst") } diff --git a/go/analysis/passes/printf/testdata/nonconst_go123.txtar b/go/analysis/passes/printf/testdata/nonconst_go123.txtar new file mode 100644 index 00000000000..87982917d9e --- /dev/null +++ b/go/analysis/passes/printf/testdata/nonconst_go123.txtar @@ -0,0 +1,61 @@ +This test checks for the correct suppression (or activation) of the +non-constant format string check (golang/go#60529), in a go1.23 module. + +See golang/go#71485 for details. + +-- go.mod -- +module example.com/nonconst + +go 1.23 + +-- nonconst.go -- +package nonconst + +import ( + "fmt" + "log" + "os" +) + +func _(s string) { + fmt.Printf(s) + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, s) + log.Printf(s) +} + +-- nonconst_go124.go -- +//go:build go1.24 +package nonconst + +import ( + "fmt" + "log" + "os" +) + +// With Go 1.24, the analyzer should be activated, as this is a go1.24 file. +func _(s string) { + fmt.Printf(s) // want `non-constant format string in call to fmt.Printf` + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, s) // want `non-constant format string in call to fmt.Fprintf` + log.Printf(s) // want `non-constant format string in call to log.Printf` +} + +-- nonconst_go124.go.golden -- +//go:build go1.24 +package nonconst + +import ( + "fmt" + "log" + "os" +) + +// With Go 1.24, the analyzer should be activated, as this is a go1.24 file. +func _(s string) { + fmt.Printf("%s", s) // want `non-constant format string in call to fmt.Printf` + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, "%s", s) // want `non-constant format string in call to fmt.Fprintf` + log.Printf("%s", s) // want `non-constant format string in call to log.Printf` +} diff --git a/go/analysis/passes/printf/testdata/nonconst_go124.txtar b/go/analysis/passes/printf/testdata/nonconst_go124.txtar new file mode 100644 index 00000000000..34d944ce970 --- /dev/null +++ b/go/analysis/passes/printf/testdata/nonconst_go124.txtar @@ -0,0 +1,59 @@ +This test checks for the correct suppression (or activation) of the +non-constant format string check (golang/go#60529), in a go1.24 module. + +See golang/go#71485 for details. + +-- go.mod -- +module example.com/nonconst + +go 1.24 + +-- nonconst.go -- +package nonconst + +import ( + "fmt" + "log" + "os" +) + +func _(s string) { + fmt.Printf(s) // want `non-constant format string in call to fmt.Printf` + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, s) // want `non-constant format string in call to fmt.Fprintf` + log.Printf(s) // want `non-constant format string in call to log.Printf` +} + +-- nonconst.go.golden -- +package nonconst + +import ( + "fmt" + "log" + "os" +) + +func _(s string) { + fmt.Printf("%s", s) // want `non-constant format string in call to fmt.Printf` + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, "%s", s) // want `non-constant format string in call to fmt.Fprintf` + log.Printf("%s", s) // want `non-constant format string in call to log.Printf` +} + +-- nonconst_go123.go -- +//go:build go1.23 +package nonconst + +import ( + "fmt" + "log" + "os" +) + +// The analyzer should be silent, as this is a go1.23 file. +func _(s string) { + fmt.Printf(s) + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, s) + log.Printf(s) +} diff --git a/go/analysis/passes/printf/testdata/src/a/a.go b/go/analysis/passes/printf/testdata/src/a/a.go index 18b9e3be2b9..02ce425f8a3 100644 --- a/go/analysis/passes/printf/testdata/src/a/a.go +++ b/go/analysis/passes/printf/testdata/src/a/a.go @@ -212,8 +212,8 @@ func PrintfTests() { // Bad argument reorderings. 
Printf("%[xd", 3) // want `a.Printf format %\[xd is missing closing \]` Printf("%[x]d x", 3) // want `a.Printf format has invalid argument index \[x\]` - Printf("%[3]*s x", "hi", 2) // want `a.Printf format has invalid argument index \[3\]` - _ = fmt.Sprintf("%[3]d x", 2) // want `fmt.Sprintf format has invalid argument index \[3\]` + Printf("%[3]*s x", "hi", 2) // want `a.Printf format %\[3]\*s reads arg #3, but call has 2 args` + _ = fmt.Sprintf("%[3]d x", 2) // want `fmt.Sprintf format %\[3]d reads arg #3, but call has 1 arg` Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // want `a.Printf format %\[2]\*\.\[1\]\*\[3\]d uses non-int \x22hi\x22 as argument of \*` Printf("%[0]s x", "arg1") // want `a.Printf format has invalid argument index \[0\]` Printf("%[0]d x", 1) // want `a.Printf format has invalid argument index \[0\]` diff --git a/go/analysis/passes/printf/testdata/src/fix/fix.go b/go/analysis/passes/printf/testdata/src/fix/fix.go deleted file mode 100644 index f5c9f654165..00000000000 --- a/go/analysis/passes/printf/testdata/src/fix/fix.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file contains tests of the printf checker's suggested fixes. - -package fix - -import ( - "fmt" - "log" - "os" -) - -func nonConstantFormat(s string) { // #60529 - fmt.Printf(s) // want `non-constant format string in call to fmt.Printf` - fmt.Printf(s, "arg") - fmt.Fprintf(os.Stderr, s) // want `non-constant format string in call to fmt.Fprintf` - log.Printf(s) // want `non-constant format string in call to log.Printf` -} diff --git a/go/analysis/passes/printf/testdata/src/fix/fix.go.golden b/go/analysis/passes/printf/testdata/src/fix/fix.go.golden deleted file mode 100644 index 57e5bb7db91..00000000000 --- a/go/analysis/passes/printf/testdata/src/fix/fix.go.golden +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file contains tests of the printf checker's suggested fixes. - -package fix - -import ( - "fmt" - "log" - "os" -) - -func nonConstantFormat(s string) { // #60529 - fmt.Printf("%s", s) // want `non-constant format string in call to fmt.Printf` - fmt.Printf(s, "arg") - fmt.Fprintf(os.Stderr, "%s", s) // want `non-constant format string in call to fmt.Fprintf` - log.Printf("%s", s) // want `non-constant format string in call to log.Printf` -} diff --git a/go/analysis/passes/printf/testdata/src/nonconst/nonconst.go b/go/analysis/passes/printf/testdata/src/nonconst/nonconst.go new file mode 100644 index 00000000000..40779123a52 --- /dev/null +++ b/go/analysis/passes/printf/testdata/src/nonconst/nonconst.go @@ -0,0 +1,23 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains tests of the printf checker's handling of non-constant +// format strings (golang/go#60529). + +package nonconst + +import ( + "fmt" + "log" + "os" +) + +// As the language version is empty here, and the new check is gated on go1.24, +// diagnostics are suppressed here. 
+func nonConstantFormat(s string) { + fmt.Printf(s) + fmt.Printf(s, "arg") + fmt.Fprintf(os.Stderr, s) + log.Printf(s) +} diff --git a/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go b/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go index 6789d73579a..72435b2fc7a 100644 --- a/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go +++ b/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go @@ -8,13 +8,13 @@ import ( _ "embed" "go/ast" "go/token" - "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -49,8 +49,8 @@ func run(pass *analysis.Pass) (interface{}, error) { } } case *ast.CallExpr: - fn, _ := typeutil.Callee(pass.TypesInfo, n).(*types.Func) - if analysisutil.IsFunctionNamed(fn, "reflect", "DeepEqual") && (isReflectValue(pass, n.Args[0]) || isReflectValue(pass, n.Args[1])) { + obj := typeutil.Callee(pass.TypesInfo, n) + if analysisinternal.IsFunctionNamed(obj, "reflect", "DeepEqual") && (isReflectValue(pass, n.Args[0]) || isReflectValue(pass, n.Args[1])) { pass.ReportRangef(n, "avoid using reflect.DeepEqual with reflect.Value") } } @@ -65,7 +65,7 @@ func isReflectValue(pass *analysis.Pass, e ast.Expr) bool { return false } // See if the type is reflect.Value - if !analysisutil.IsNamedType(tv.Type, "reflect", "Value") { + if !analysisinternal.IsTypeNamed(tv.Type, "reflect", "Value") { return false } if _, ok := e.(*ast.CompositeLit); ok { diff --git a/go/analysis/passes/shift/shift.go b/go/analysis/passes/shift/shift.go index 759ed0043ff..46b5f6d68c6 100644 --- a/go/analysis/passes/shift/shift.go +++ b/go/analysis/passes/shift/shift.go @@ -19,8 +19,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typeparams" ) @@ -123,7 +123,7 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { } } if amt >= minSize { - ident := analysisutil.Format(pass.Fset, x) + ident := analysisinternal.Format(pass.Fset, x) qualifier := "" if len(sizes) > 1 { qualifier = "may be " diff --git a/go/analysis/passes/sigchanyzer/sigchanyzer.go b/go/analysis/passes/sigchanyzer/sigchanyzer.go index 5f121f720d8..78a2fa5ea3b 100644 --- a/go/analysis/passes/sigchanyzer/sigchanyzer.go +++ b/go/analysis/passes/sigchanyzer/sigchanyzer.go @@ -8,6 +8,8 @@ package sigchanyzer import ( "bytes" + "slices" + _ "embed" "go/ast" "go/format" @@ -18,6 +20,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -32,8 +35,8 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "os/signal") { +func run(pass *analysis.Pass) (any, error) { + if !analysisinternal.Imports(pass.Pkg, "os/signal") { return nil, nil // doesn't directly import signal } @@ -69,7 +72,7 @@ func run(pass *analysis.Pass) (interface{}, error) { // mutating the AST. See https://golang.org/issue/46129. 
chanDeclCopy := &ast.CallExpr{} *chanDeclCopy = *chanDecl - chanDeclCopy.Args = append([]ast.Expr(nil), chanDecl.Args...) + chanDeclCopy.Args = slices.Clone(chanDecl.Args) chanDeclCopy.Args = append(chanDeclCopy.Args, &ast.BasicLit{ Kind: token.INT, Value: "1", diff --git a/go/analysis/passes/slog/slog.go b/go/analysis/passes/slog/slog.go index 0129102a336..c1ac960435d 100644 --- a/go/analysis/passes/slog/slog.go +++ b/go/analysis/passes/slog/slog.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" ) @@ -114,10 +115,10 @@ func run(pass *analysis.Pass) (any, error) { default: if unknownArg == nil { pass.ReportRangef(arg, "%s arg %q should be a string or a slog.Attr (possible missing key or value)", - shortName(fn), analysisutil.Format(pass.Fset, arg)) + shortName(fn), analysisinternal.Format(pass.Fset, arg)) } else { pass.ReportRangef(arg, "%s arg %q should probably be a string or a slog.Attr (previous arg %q cannot be a key)", - shortName(fn), analysisutil.Format(pass.Fset, arg), analysisutil.Format(pass.Fset, unknownArg)) + shortName(fn), analysisinternal.Format(pass.Fset, arg), analysisinternal.Format(pass.Fset, unknownArg)) } // Stop here so we report at most one missing key per call. return @@ -157,7 +158,7 @@ func run(pass *analysis.Pass) (any, error) { } func isAttr(t types.Type) bool { - return analysisutil.IsNamedType(t, "log/slog", "Attr") + return analysisinternal.IsTypeNamed(t, "log/slog", "Attr") } // shortName returns a name for the function that is shorter than FullName. diff --git a/go/analysis/passes/sortslice/analyzer.go b/go/analysis/passes/sortslice/analyzer.go index 6c151a02c16..9fe0d209289 100644 --- a/go/analysis/passes/sortslice/analyzer.go +++ b/go/analysis/passes/sortslice/analyzer.go @@ -15,9 +15,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) const Doc = `check the argument type of sort.Slice @@ -33,8 +33,8 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "sort") { +func run(pass *analysis.Pass) (any, error) { + if !analysisinternal.Imports(pass.Pkg, "sort") { return nil, nil // doesn't directly import sort } @@ -46,10 +46,11 @@ func run(pass *analysis.Pass) (interface{}, error) { inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) - fn, _ := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if !analysisutil.IsFunctionNamed(fn, "sort", "Slice", "SliceStable", "SliceIsSorted") { + obj := typeutil.Callee(pass.TypesInfo, call) + if !analysisinternal.IsFunctionNamed(obj, "sort", "Slice", "SliceStable", "SliceIsSorted") { return } + callee := obj.(*types.Func) arg := call.Args[0] typ := pass.TypesInfo.Types[arg].Type @@ -126,7 +127,7 @@ func run(pass *analysis.Pass) (interface{}, error) { pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), - Message: fmt.Sprintf("%s's argument must be a slice; is called with %s", fn.FullName(), typ.String()), + Message: fmt.Sprintf("%s's argument must be a slice; is called with %s", callee.FullName(), typ.String()), SuggestedFixes: fixes, }) }) diff --git 
a/go/analysis/passes/stdversion/stdversion.go b/go/analysis/passes/stdversion/stdversion.go index 75d8697759e..429125a8b7d 100644 --- a/go/analysis/passes/stdversion/stdversion.go +++ b/go/analysis/passes/stdversion/stdversion.go @@ -11,6 +11,7 @@ import ( "go/build" "go/types" "regexp" + "slices" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -46,16 +47,14 @@ func run(pass *analysis.Pass) (any, error) { // Prior to go1.22, versions.FileVersion returns only the // toolchain version, which is of no use to us, so // disable this analyzer on earlier versions. - if !slicesContains(build.Default.ReleaseTags, "go1.22") { + if !slices.Contains(build.Default.ReleaseTags, "go1.22") { return nil, nil } // Don't report diagnostics for modules marked before go1.21, // since at that time the go directive wasn't clearly // specified as a toolchain requirement. - // - // TODO(adonovan): after go1.21, call GoVersion directly. - pkgVersion := any(pass.Pkg).(interface{ GoVersion() string }).GoVersion() + pkgVersion := pass.Pkg.GoVersion() if !versions.AtLeast(pkgVersion, "go1.21") { return nil, nil } @@ -88,7 +87,7 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(nodeFilter, func(n ast.Node) { switch n := n.(type) { case *ast.File: - if isGenerated(n) { + if ast.IsGenerated(n) { // Suppress diagnostics in generated files (such as cgo). fileVersion = "" } else { @@ -115,19 +114,6 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } -// Reduced from x/tools/gopls/internal/golang/util.go. Good enough for now. -// TODO(adonovan): use ast.IsGenerated in go1.21. -func isGenerated(f *ast.File) bool { - for _, group := range f.Comments { - for _, comment := range group.List { - if matched := generatedRx.MatchString(comment.Text); matched { - return true - } - } - } - return false -} - // Matches cgo generated comment as well as the proposed standard: // // https://golang.org/s/generatedcode @@ -147,13 +133,3 @@ func origin(obj types.Object) types.Object { } return obj } - -// TODO(adonovan): use go1.21 slices.Contains. -func slicesContains[S ~[]E, E comparable](slice S, x E) bool { - for _, elem := range slice { - if elem == x { - return true - } - } - return false -} diff --git a/go/analysis/passes/stringintconv/string.go b/go/analysis/passes/stringintconv/string.go index 108600a2baf..f56e6ecaa29 100644 --- a/go/analysis/passes/stringintconv/string.go +++ b/go/analysis/passes/stringintconv/string.go @@ -198,14 +198,14 @@ func run(pass *analysis.Pass) (interface{}, error) { // the type has methods, as some {String,GoString,Format} // may change the behavior of fmt.Sprint. 
if len(ttypes) == 1 && len(vtypes) == 1 && types.NewMethodSet(V0).Len() == 0 { - fmtName, importEdits := analysisinternal.AddImport(pass.TypesInfo, file, arg.Pos(), "fmt", "fmt") + _, prefix, importEdits := analysisinternal.AddImport(pass.TypesInfo, file, "fmt", "fmt", "Sprint", arg.Pos()) if types.Identical(T0, types.Typ[types.String]) { // string(x) -> fmt.Sprint(x) addFix("Format the number as a decimal", append(importEdits, analysis.TextEdit{ Pos: call.Fun.Pos(), End: call.Fun.End(), - NewText: []byte(fmtName + ".Sprint"), + NewText: []byte(prefix + "Sprint"), }), ) } else { @@ -214,7 +214,7 @@ func run(pass *analysis.Pass) (interface{}, error) { analysis.TextEdit{ Pos: call.Lparen + 1, End: call.Lparen + 1, - NewText: []byte(fmtName + ".Sprint("), + NewText: []byte(prefix + "Sprint("), }, analysis.TextEdit{ Pos: call.Rparen, diff --git a/go/analysis/passes/stringintconv/testdata/src/fix/fixdot.go b/go/analysis/passes/stringintconv/testdata/src/fix/fixdot.go new file mode 100644 index 00000000000..d89ca94af82 --- /dev/null +++ b/go/analysis/passes/stringintconv/testdata/src/fix/fixdot.go @@ -0,0 +1,7 @@ +package fix + +import . "fmt" + +func _(x uint64) { + Println(string(x)) // want `conversion from uint64 to string yields...` +} diff --git a/go/analysis/passes/stringintconv/testdata/src/fix/fixdot.go.golden b/go/analysis/passes/stringintconv/testdata/src/fix/fixdot.go.golden new file mode 100644 index 00000000000..18aec2d027a --- /dev/null +++ b/go/analysis/passes/stringintconv/testdata/src/fix/fixdot.go.golden @@ -0,0 +1,18 @@ +-- Format the number as a decimal -- +package fix + +import . "fmt" + +func _(x uint64) { + Println(Sprint(x)) // want `conversion from uint64 to string yields...` +} + +-- Convert a single rune to a string -- +package fix + +import . "fmt" + +func _(x uint64) { + Println(string(rune(x))) // want `conversion from uint64 to string yields...` +} + diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine.go b/go/analysis/passes/testinggoroutine/testinggoroutine.go index effcdc5700b..fef5a6014c4 100644 --- a/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -16,6 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -38,7 +39,7 @@ var Analyzer = &analysis.Analyzer{ func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if !analysisutil.Imports(pass.Pkg, "testing") { + if !analysisinternal.Imports(pass.Pkg, "testing") { return nil, nil } diff --git a/go/analysis/passes/testinggoroutine/util.go b/go/analysis/passes/testinggoroutine/util.go index 8c7a51ca525..027c99e6b0f 100644 --- a/go/analysis/passes/testinggoroutine/util.go +++ b/go/analysis/passes/testinggoroutine/util.go @@ -36,6 +36,8 @@ func localFunctionDecls(info *types.Info, files []*ast.File) func(*types.Func) * // isMethodNamed returns true if f is a method defined // in package with the path pkgPath with a name in names. +// +// (Unlike [analysisinternal.IsMethodNamed], it ignores the receiver type name.) 
func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f == nil { return false diff --git a/go/analysis/passes/tests/tests.go b/go/analysis/passes/tests/tests.go index 36f2c43eb64..285b34218c3 100644 --- a/go/analysis/passes/tests/tests.go +++ b/go/analysis/passes/tests/tests.go @@ -16,6 +16,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -257,7 +258,7 @@ func isTestingType(typ types.Type, testingType string) bool { if !ok { return false } - return analysisutil.IsNamedType(ptr.Elem(), "testing", testingType) + return analysisinternal.IsTypeNamed(ptr.Elem(), "testing", testingType) } // Validate that fuzz target function's arguments are of accepted types. diff --git a/go/analysis/passes/timeformat/timeformat.go b/go/analysis/passes/timeformat/timeformat.go index 4a6c6b8bc6c..4fdbb2b5415 100644 --- a/go/analysis/passes/timeformat/timeformat.go +++ b/go/analysis/passes/timeformat/timeformat.go @@ -19,6 +19,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) const badFormat = "2006-02-01" @@ -35,7 +36,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // Note: (time.Time).Format is a method and can be a typeutil.Callee // without directly importing "time". So we cannot just skip this package // when !analysisutil.Imports(pass.Pkg, "time"). @@ -48,11 +49,9 @@ func run(pass *analysis.Pass) (interface{}, error) { } inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) - fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if !ok { - return - } - if !isTimeDotFormat(fn) && !isTimeDotParse(fn) { + obj := typeutil.Callee(pass.TypesInfo, call) + if !analysisinternal.IsMethodNamed(obj, "time", "Time", "Format") && + !analysisinternal.IsFunctionNamed(obj, "time", "Parse") { return } if len(call.Args) > 0 { @@ -87,19 +86,6 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } -func isTimeDotFormat(f *types.Func) bool { - if f.Name() != "Format" || f.Pkg() == nil || f.Pkg().Path() != "time" { - return false - } - // Verify that the receiver is time.Time. - recv := f.Type().(*types.Signature).Recv() - return recv != nil && analysisutil.IsNamedType(recv.Type(), "time", "Time") -} - -func isTimeDotParse(f *types.Func) bool { - return analysisutil.IsFunctionNamed(f, "time", "Parse") -} - // badFormatAt return the start of a bad format in e or -1 if no bad format is found. func badFormatAt(info *types.Info, e ast.Expr) int { tv, ok := info.Types[e] diff --git a/go/analysis/passes/unmarshal/unmarshal.go b/go/analysis/passes/unmarshal/unmarshal.go index a7889fa4590..26e894bd400 100644 --- a/go/analysis/passes/unmarshal/unmarshal.go +++ b/go/analysis/passes/unmarshal/unmarshal.go @@ -28,7 +28,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { switch pass.Pkg.Path() { case "encoding/gob", "encoding/json", "encoding/xml", "encoding/asn1": // These packages know how to use their own APIs. 
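Aside on the helper migration running through the hunks above (slog, sortslice, tests, timeformat, testinggoroutine): per-pass helpers such as isTimeDotFormat and the local isMethodNamed variants are replaced by the shared analysisinternal.IsMethodNamed, IsFunctionNamed and IsTypeNamed helpers. The sketch below is only an approximation of the check such a helper performs, written against the public go/types API; the real implementation lives in golang.org/x/tools/internal/analysisinternal and may differ in details (aliases, instantiated generics, variadic name lists).

package sketch

import "go/types"

// isMethodNamed reports whether obj is the method pkgPath.recvName.name.
// Approximation only: the receiver may be a pointer, and the receiver's
// named type is matched by object name within the given package.
func isMethodNamed(obj types.Object, pkgPath, recvName, name string) bool {
    fn, ok := obj.(*types.Func)
    if !ok || fn.Pkg() == nil || fn.Pkg().Path() != pkgPath || fn.Name() != name {
        return false
    }
    recv := fn.Type().(*types.Signature).Recv()
    if recv == nil {
        return false // not a method
    }
    t := recv.Type()
    if ptr, ok := t.(*types.Pointer); ok {
        t = ptr.Elem()
    }
    named, ok := t.(*types.Named)
    return ok && named.Obj().Name() == recvName
}

For instance, the timeformat hunk above corresponds to a check like isMethodNamed(obj, "time", "Time", "Format").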
diff --git a/go/analysis/passes/unreachable/doc.go b/go/analysis/passes/unreachable/doc.go index d17d0d9444e..325a15358d5 100644 --- a/go/analysis/passes/unreachable/doc.go +++ b/go/analysis/passes/unreachable/doc.go @@ -9,6 +9,6 @@ // unreachable: check for unreachable code // // The unreachable analyzer finds statements that execution can never reach -// because they are preceded by an return statement, a call to panic, an +// because they are preceded by a return statement, a call to panic, an // infinite loop, or similar constructs. package unreachable diff --git a/go/analysis/passes/unsafeptr/unsafeptr.go b/go/analysis/passes/unsafeptr/unsafeptr.go index 272ae7fe045..fb5b944faad 100644 --- a/go/analysis/passes/unsafeptr/unsafeptr.go +++ b/go/analysis/passes/unsafeptr/unsafeptr.go @@ -16,6 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -104,7 +105,7 @@ func isSafeUintptr(info *types.Info, x ast.Expr) bool { } switch sel.Sel.Name { case "Pointer", "UnsafeAddr": - if analysisutil.IsNamedType(info.Types[sel.X].Type, "reflect", "Value") { + if analysisinternal.IsTypeNamed(info.Types[sel.X].Type, "reflect", "Value") { return true } } @@ -152,5 +153,5 @@ func hasBasicType(info *types.Info, x ast.Expr, kind types.BasicKind) bool { // isReflectHeader reports whether t is reflect.SliceHeader or reflect.StringHeader. func isReflectHeader(t types.Type) bool { - return analysisutil.IsNamedType(t, "reflect", "SliceHeader", "StringHeader") + return analysisinternal.IsTypeNamed(t, "reflect", "SliceHeader", "StringHeader") } diff --git a/go/analysis/passes/unusedresult/unusedresult.go b/go/analysis/passes/unusedresult/unusedresult.go index c27d26dd6ec..d7cc1e6ae2c 100644 --- a/go/analysis/passes/unusedresult/unusedresult.go +++ b/go/analysis/passes/unusedresult/unusedresult.go @@ -131,7 +131,7 @@ func run(pass *analysis.Pass) (interface{}, error) { // func() string var sigNoArgsStringResult = types.NewSignature(nil, nil, - types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])), + types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), false) type stringSetFlag map[string]bool diff --git a/go/analysis/passes/waitgroup/waitgroup.go b/go/analysis/passes/waitgroup/waitgroup.go index cbb0bfc9e6b..14c6986eaba 100644 --- a/go/analysis/passes/waitgroup/waitgroup.go +++ b/go/analysis/passes/waitgroup/waitgroup.go @@ -9,7 +9,6 @@ package waitgroup import ( _ "embed" "go/ast" - "go/types" "reflect" "golang.org/x/tools/go/analysis" @@ -17,7 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/analysisinternal" ) //go:embed doc.go @@ -32,7 +31,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisutil.Imports(pass.Pkg, "sync") { + if !analysisinternal.Imports(pass.Pkg, "sync") { return nil, nil // doesn't directly import sync } @@ -44,8 +43,8 @@ func run(pass *analysis.Pass) (any, error) { inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) { if push { call := n.(*ast.CallExpr) - if fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func); ok && - isMethodNamed(fn, "sync", "WaitGroup", "Add") && + obj := 
typeutil.Callee(pass.TypesInfo, call) + if analysisinternal.IsMethodNamed(obj, "sync", "WaitGroup", "Add") && hasSuffix(stack, wantSuffix) && backindex(stack, 1) == backindex(stack, 2).(*ast.BlockStmt).List[0] { // ExprStmt must be Block's first stmt @@ -86,19 +85,6 @@ func hasSuffix(stack, suffix []ast.Node) bool { return true } -// isMethodNamed reports whether f is a method with the specified -// package, receiver type, and method names. -func isMethodNamed(fn *types.Func, pkg, recv, name string) bool { - if fn.Pkg() != nil && fn.Pkg().Path() == pkg && fn.Name() == name { - if r := fn.Type().(*types.Signature).Recv(); r != nil { - if _, gotRecv := typesinternal.ReceiverNamed(r); gotRecv != nil { - return gotRecv.Obj().Name() == recv - } - } - } - return false -} - // backindex is like [slices.Index] but from the back of the slice. func backindex[T any](slice []T, i int) T { return slice[len(slice)-1-i] diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go index 1a9b3094e5e..82c3db6a39d 100644 --- a/go/analysis/unitchecker/unitchecker.go +++ b/go/analysis/unitchecker/unitchecker.go @@ -367,17 +367,26 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re } pass := &analysis.Pass{ - Analyzer: a, - Fset: fset, - Files: files, - OtherFiles: cfg.NonGoFiles, - IgnoredFiles: cfg.IgnoredFiles, - Pkg: pkg, - TypesInfo: info, - TypesSizes: tc.Sizes, - TypeErrors: nil, // unitchecker doesn't RunDespiteErrors - ResultOf: inputs, - Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, + Analyzer: a, + Fset: fset, + Files: files, + OtherFiles: cfg.NonGoFiles, + IgnoredFiles: cfg.IgnoredFiles, + Pkg: pkg, + TypesInfo: info, + TypesSizes: tc.Sizes, + TypeErrors: nil, // unitchecker doesn't RunDespiteErrors + ResultOf: inputs, + Report: func(d analysis.Diagnostic) { + // Unitchecker doesn't apply fixes, but it does report them in the JSON output. + if err := analysisinternal.ValidateFixes(fset, a, d.SuggestedFixes); err != nil { + // Since we have diagnostics, the exit code will be nonzero, + // so logging these errors is sufficient. 
+ log.Println(err) + d.SuggestedFixes = nil + } + act.diagnostics = append(act.diagnostics, d) + }, ImportObjectFact: facts.ImportObjectFact, ExportObjectFact: facts.ExportObjectFact, AllObjectFacts: func() []analysis.ObjectFact { return facts.AllObjectFacts(factFilter) }, @@ -386,7 +395,7 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re AllPackageFacts: func() []analysis.PackageFact { return facts.AllPackageFacts(factFilter) }, Module: module, } - pass.ReadFile = analysisinternal.MakeReadFile(pass) + pass.ReadFile = analysisinternal.CheckedReadFile(pass, os.ReadFile) t0 := time.Now() act.result, act.err = a.Run(pass) diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go index 1801b49cfe8..173d76348f7 100644 --- a/go/analysis/unitchecker/unitchecker_test.go +++ b/go/analysis/unitchecker/unitchecker_test.go @@ -133,7 +133,7 @@ func _() { "message": "self-assignment of i to i", "suggested_fixes": \[ \{ - "message": "Remove", + "message": "Remove self-assignment", "edits": \[ \{ "filename": "([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?c/c.go", diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go index cfda8934332..0d5050fe405 100644 --- a/go/ast/inspector/inspector.go +++ b/go/ast/inspector/inspector.go @@ -37,6 +37,8 @@ package inspector import ( "go/ast" _ "unsafe" + + "golang.org/x/tools/internal/astutil/edge" ) // An Inspector provides methods for inspecting @@ -48,6 +50,21 @@ type Inspector struct { //go:linkname events func events(in *Inspector) []event { return in.events } +func packEdgeKindAndIndex(ek edge.Kind, index int) int32 { + return int32(uint32(index+1)<<7 | uint32(ek)) +} + +// unpackEdgeKindAndIndex unpacks the edge kind and edge index (within +// an []ast.Node slice) from the parent field of a pop event. +// +//go:linkname unpackEdgeKindAndIndex +func unpackEdgeKindAndIndex(x int32) (edge.Kind, int) { + // The "parent" field of a pop node holds the + // edge Kind in the lower 7 bits and the index+1 + // in the upper 25. + return edge.Kind(x & 0x7f), int(x>>7) - 1 +} + // New returns an Inspector for the specified syntax trees. func New(files []*ast.File) *Inspector { return &Inspector{traverse(files)} @@ -59,7 +76,7 @@ type event struct { node ast.Node typ uint64 // typeOf(node) on push event, or union of typ strictly between push and pop events on pop events index int32 // index of corresponding push or pop event - parent int32 // index of parent's push node (defined for push nodes only) + parent int32 // index of parent's push node (push nodes only), or packed edge kind/index (pop nodes only) } // TODO: Experiment with storing only the second word of event.node (unsafe.Pointer). @@ -194,49 +211,74 @@ func traverse(files []*ast.File) []event { extent += int(f.End() - f.Pos()) } // This estimate is based on the net/http package. 
- capacity := extent * 33 / 100 - if capacity > 1e6 { - capacity = 1e6 // impose some reasonable maximum + capacity := min(extent*33/100, 1e6) // impose some reasonable maximum (1M) + + v := &visitor{ + events: make([]event, 0, capacity), + stack: []item{{index: -1}}, // include an extra event so file nodes have a parent } - events := make([]event, 0, capacity) + for _, file := range files { + walk(v, edge.Invalid, -1, file) + } + return v.events +} - var stack []event - stack = append(stack, event{index: -1}) // include an extra event so file nodes have a parent - for _, f := range files { - ast.Inspect(f, func(n ast.Node) bool { - if n != nil { - // push - ev := event{ - node: n, - typ: 0, // temporarily used to accumulate type bits of subtree - index: int32(len(events)), // push event temporarily holds own index - parent: stack[len(stack)-1].index, - } - stack = append(stack, ev) - events = append(events, ev) +type visitor struct { + events []event + stack []item +} - // 2B nodes ought to be enough for anyone! - if int32(len(events)) < 0 { - panic("event index exceeded int32") - } - } else { - // pop - top := len(stack) - 1 - ev := stack[top] - typ := typeOf(ev.node) - push := ev.index - parent := top - 1 - - events[push].typ = typ // set type of push - stack[parent].typ |= typ | ev.typ // parent's typ contains push and pop's typs. - events[push].index = int32(len(events)) // make push refer to pop - - stack = stack[:top] - events = append(events, ev) - } - return true - }) +type item struct { + index int32 // index of current node's push event + parentIndex int32 // index of parent node's push event + typAccum uint64 // accumulated type bits of current node's descendents + edgeKindAndIndex int32 // edge.Kind and index, bit packed +} + +func (v *visitor) push(ek edge.Kind, eindex int, node ast.Node) { + var ( + index = int32(len(v.events)) + parentIndex = v.stack[len(v.stack)-1].index + ) + v.events = append(v.events, event{ + node: node, + parent: parentIndex, + typ: typeOf(node), + index: 0, // (pop index is set later by visitor.pop) + }) + v.stack = append(v.stack, item{ + index: index, + parentIndex: parentIndex, + edgeKindAndIndex: packEdgeKindAndIndex(ek, eindex), + }) + + // 2B nodes ought to be enough for anyone! + if int32(len(v.events)) < 0 { + panic("event index exceeded int32") + } + + // 32M elements in an []ast.Node ought to be enough for anyone! 
+ if ek2, eindex2 := unpackEdgeKindAndIndex(packEdgeKindAndIndex(ek, eindex)); ek2 != ek || eindex2 != eindex { + panic("Node slice index exceeded uint25") } +} + +func (v *visitor) pop(node ast.Node) { + top := len(v.stack) - 1 + current := v.stack[top] + + push := &v.events[current.index] + parent := &v.stack[top-1] + + push.index = int32(len(v.events)) // make push event refer to pop + parent.typAccum |= current.typAccum | push.typ // accumulate type bits into parent + + v.stack = v.stack[:top] - return events + v.events = append(v.events, event{ + node: node, + typ: current.typAccum, + index: current.index, + parent: current.edgeKindAndIndex, // see [unpackEdgeKindAndIndex] + }) } diff --git a/go/ast/inspector/typeof.go b/go/ast/inspector/typeof.go index 40b1bfd7e62..97784484578 100644 --- a/go/ast/inspector/typeof.go +++ b/go/ast/inspector/typeof.go @@ -219,7 +219,7 @@ func typeOf(n ast.Node) uint64 { //go:linkname maskOf func maskOf(nodes []ast.Node) uint64 { - if nodes == nil { + if len(nodes) == 0 { return math.MaxUint64 // match all node types } var mask uint64 diff --git a/go/ast/inspector/walk.go b/go/ast/inspector/walk.go new file mode 100644 index 00000000000..5a42174a0a0 --- /dev/null +++ b/go/ast/inspector/walk.go @@ -0,0 +1,341 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package inspector + +// This file is a fork of ast.Inspect to reduce unnecessary dynamic +// calls and to gather edge information. +// +// Consistency with the original is ensured by TestInspectAllNodes. + +import ( + "fmt" + "go/ast" + + "golang.org/x/tools/internal/astutil/edge" +) + +func walkList[N ast.Node](v *visitor, ek edge.Kind, list []N) { + for i, node := range list { + walk(v, ek, i, node) + } +} + +func walk(v *visitor, ek edge.Kind, index int, node ast.Node) { + v.push(ek, index, node) + + // walk children + // (the order of the cases matches the order + // of the corresponding node types in ast.go) + switch n := node.(type) { + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + walkList(v, edge.CommentGroup_List, n.List) + + case *ast.Field: + if n.Doc != nil { + walk(v, edge.Field_Doc, -1, n.Doc) + } + walkList(v, edge.Field_Names, n.Names) + if n.Type != nil { + walk(v, edge.Field_Type, -1, n.Type) + } + if n.Tag != nil { + walk(v, edge.Field_Tag, -1, n.Tag) + } + if n.Comment != nil { + walk(v, edge.Field_Comment, -1, n.Comment) + } + + case *ast.FieldList: + walkList(v, edge.FieldList_List, n.List) + + // Expressions + case *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.Ellipsis: + if n.Elt != nil { + walk(v, edge.Ellipsis_Elt, -1, n.Elt) + } + + case *ast.FuncLit: + walk(v, edge.FuncLit_Type, -1, n.Type) + walk(v, edge.FuncLit_Body, -1, n.Body) + + case *ast.CompositeLit: + if n.Type != nil { + walk(v, edge.CompositeLit_Type, -1, n.Type) + } + walkList(v, edge.CompositeLit_Elts, n.Elts) + + case *ast.ParenExpr: + walk(v, edge.ParenExpr_X, -1, n.X) + + case *ast.SelectorExpr: + walk(v, edge.SelectorExpr_X, -1, n.X) + walk(v, edge.SelectorExpr_Sel, -1, n.Sel) + + case *ast.IndexExpr: + walk(v, edge.IndexExpr_X, -1, n.X) + walk(v, edge.IndexExpr_Index, -1, n.Index) + + case *ast.IndexListExpr: + walk(v, edge.IndexListExpr_X, -1, n.X) + walkList(v, edge.IndexListExpr_Indices, n.Indices) + + case *ast.SliceExpr: + walk(v, edge.SliceExpr_X, -1, n.X) + if n.Low != nil { + walk(v, edge.SliceExpr_Low, -1, 
n.Low) + } + if n.High != nil { + walk(v, edge.SliceExpr_High, -1, n.High) + } + if n.Max != nil { + walk(v, edge.SliceExpr_Max, -1, n.Max) + } + + case *ast.TypeAssertExpr: + walk(v, edge.TypeAssertExpr_X, -1, n.X) + if n.Type != nil { + walk(v, edge.TypeAssertExpr_Type, -1, n.Type) + } + + case *ast.CallExpr: + walk(v, edge.CallExpr_Fun, -1, n.Fun) + walkList(v, edge.CallExpr_Args, n.Args) + + case *ast.StarExpr: + walk(v, edge.StarExpr_X, -1, n.X) + + case *ast.UnaryExpr: + walk(v, edge.UnaryExpr_X, -1, n.X) + + case *ast.BinaryExpr: + walk(v, edge.BinaryExpr_X, -1, n.X) + walk(v, edge.BinaryExpr_Y, -1, n.Y) + + case *ast.KeyValueExpr: + walk(v, edge.KeyValueExpr_Key, -1, n.Key) + walk(v, edge.KeyValueExpr_Value, -1, n.Value) + + // Types + case *ast.ArrayType: + if n.Len != nil { + walk(v, edge.ArrayType_Len, -1, n.Len) + } + walk(v, edge.ArrayType_Elt, -1, n.Elt) + + case *ast.StructType: + walk(v, edge.StructType_Fields, -1, n.Fields) + + case *ast.FuncType: + if n.TypeParams != nil { + walk(v, edge.FuncType_TypeParams, -1, n.TypeParams) + } + if n.Params != nil { + walk(v, edge.FuncType_Params, -1, n.Params) + } + if n.Results != nil { + walk(v, edge.FuncType_Results, -1, n.Results) + } + + case *ast.InterfaceType: + walk(v, edge.InterfaceType_Methods, -1, n.Methods) + + case *ast.MapType: + walk(v, edge.MapType_Key, -1, n.Key) + walk(v, edge.MapType_Value, -1, n.Value) + + case *ast.ChanType: + walk(v, edge.ChanType_Value, -1, n.Value) + + // Statements + case *ast.BadStmt: + // nothing to do + + case *ast.DeclStmt: + walk(v, edge.DeclStmt_Decl, -1, n.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + walk(v, edge.LabeledStmt_Label, -1, n.Label) + walk(v, edge.LabeledStmt_Stmt, -1, n.Stmt) + + case *ast.ExprStmt: + walk(v, edge.ExprStmt_X, -1, n.X) + + case *ast.SendStmt: + walk(v, edge.SendStmt_Chan, -1, n.Chan) + walk(v, edge.SendStmt_Value, -1, n.Value) + + case *ast.IncDecStmt: + walk(v, edge.IncDecStmt_X, -1, n.X) + + case *ast.AssignStmt: + walkList(v, edge.AssignStmt_Lhs, n.Lhs) + walkList(v, edge.AssignStmt_Rhs, n.Rhs) + + case *ast.GoStmt: + walk(v, edge.GoStmt_Call, -1, n.Call) + + case *ast.DeferStmt: + walk(v, edge.DeferStmt_Call, -1, n.Call) + + case *ast.ReturnStmt: + walkList(v, edge.ReturnStmt_Results, n.Results) + + case *ast.BranchStmt: + if n.Label != nil { + walk(v, edge.BranchStmt_Label, -1, n.Label) + } + + case *ast.BlockStmt: + walkList(v, edge.BlockStmt_List, n.List) + + case *ast.IfStmt: + if n.Init != nil { + walk(v, edge.IfStmt_Init, -1, n.Init) + } + walk(v, edge.IfStmt_Cond, -1, n.Cond) + walk(v, edge.IfStmt_Body, -1, n.Body) + if n.Else != nil { + walk(v, edge.IfStmt_Else, -1, n.Else) + } + + case *ast.CaseClause: + walkList(v, edge.CaseClause_List, n.List) + walkList(v, edge.CaseClause_Body, n.Body) + + case *ast.SwitchStmt: + if n.Init != nil { + walk(v, edge.SwitchStmt_Init, -1, n.Init) + } + if n.Tag != nil { + walk(v, edge.SwitchStmt_Tag, -1, n.Tag) + } + walk(v, edge.SwitchStmt_Body, -1, n.Body) + + case *ast.TypeSwitchStmt: + if n.Init != nil { + walk(v, edge.TypeSwitchStmt_Init, -1, n.Init) + } + walk(v, edge.TypeSwitchStmt_Assign, -1, n.Assign) + walk(v, edge.TypeSwitchStmt_Body, -1, n.Body) + + case *ast.CommClause: + if n.Comm != nil { + walk(v, edge.CommClause_Comm, -1, n.Comm) + } + walkList(v, edge.CommClause_Body, n.Body) + + case *ast.SelectStmt: + walk(v, edge.SelectStmt_Body, -1, n.Body) + + case *ast.ForStmt: + if n.Init != nil { + walk(v, edge.ForStmt_Init, -1, n.Init) + } + if n.Cond != nil { + walk(v, 
edge.ForStmt_Cond, -1, n.Cond) + } + if n.Post != nil { + walk(v, edge.ForStmt_Post, -1, n.Post) + } + walk(v, edge.ForStmt_Body, -1, n.Body) + + case *ast.RangeStmt: + if n.Key != nil { + walk(v, edge.RangeStmt_Key, -1, n.Key) + } + if n.Value != nil { + walk(v, edge.RangeStmt_Value, -1, n.Value) + } + walk(v, edge.RangeStmt_X, -1, n.X) + walk(v, edge.RangeStmt_Body, -1, n.Body) + + // Declarations + case *ast.ImportSpec: + if n.Doc != nil { + walk(v, edge.ImportSpec_Doc, -1, n.Doc) + } + if n.Name != nil { + walk(v, edge.ImportSpec_Name, -1, n.Name) + } + walk(v, edge.ImportSpec_Path, -1, n.Path) + if n.Comment != nil { + walk(v, edge.ImportSpec_Comment, -1, n.Comment) + } + + case *ast.ValueSpec: + if n.Doc != nil { + walk(v, edge.ValueSpec_Doc, -1, n.Doc) + } + walkList(v, edge.ValueSpec_Names, n.Names) + if n.Type != nil { + walk(v, edge.ValueSpec_Type, -1, n.Type) + } + walkList(v, edge.ValueSpec_Values, n.Values) + if n.Comment != nil { + walk(v, edge.ValueSpec_Comment, -1, n.Comment) + } + + case *ast.TypeSpec: + if n.Doc != nil { + walk(v, edge.TypeSpec_Doc, -1, n.Doc) + } + walk(v, edge.TypeSpec_Name, -1, n.Name) + if n.TypeParams != nil { + walk(v, edge.TypeSpec_TypeParams, -1, n.TypeParams) + } + walk(v, edge.TypeSpec_Type, -1, n.Type) + if n.Comment != nil { + walk(v, edge.TypeSpec_Comment, -1, n.Comment) + } + + case *ast.BadDecl: + // nothing to do + + case *ast.GenDecl: + if n.Doc != nil { + walk(v, edge.GenDecl_Doc, -1, n.Doc) + } + walkList(v, edge.GenDecl_Specs, n.Specs) + + case *ast.FuncDecl: + if n.Doc != nil { + walk(v, edge.FuncDecl_Doc, -1, n.Doc) + } + if n.Recv != nil { + walk(v, edge.FuncDecl_Recv, -1, n.Recv) + } + walk(v, edge.FuncDecl_Name, -1, n.Name) + walk(v, edge.FuncDecl_Type, -1, n.Type) + if n.Body != nil { + walk(v, edge.FuncDecl_Body, -1, n.Body) + } + + case *ast.File: + if n.Doc != nil { + walk(v, edge.File_Doc, -1, n.Doc) + } + walk(v, edge.File_Name, -1, n.Name) + walkList(v, edge.File_Decls, n.Decls) + // don't walk n.Comments - they have been + // visited already through the individual + // nodes + + default: + // (includes *ast.Package) + panic(fmt.Sprintf("Walk: unexpected node type %T", n)) + } + + v.pop(node) +} diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go index 1a274f38f84..492258f81e3 100644 --- a/go/callgraph/vta/propagation_test.go +++ b/go/callgraph/vta/propagation_test.go @@ -336,7 +336,7 @@ func TestPropagation(t *testing.T) { "Local(t2)": "A;B;C", }, }, - // The outer loop of subsumed-scc pushes A an B through the graph. + // The outer loop of subsumed-scc pushes A and B through the graph. {name: "subsumed-scc", graph: suite["subsumed-scc"], want: map[string]string{ "Local(t0)": "A;B", diff --git a/go/cfg/cfg_test.go b/go/cfg/cfg_test.go index 536d2fe5df7..d5f04ed5731 100644 --- a/go/cfg/cfg_test.go +++ b/go/cfg/cfg_test.go @@ -127,12 +127,6 @@ func f10(ch chan int) { } live() } - -func f11() { - goto; // mustn't crash - dead() -} - ` func TestDeadCode(t *testing.T) { diff --git a/go/expect/extract.go b/go/expect/extract.go index 1ca67d24958..902b1e806e4 100644 --- a/go/expect/extract.go +++ b/go/expect/extract.go @@ -21,7 +21,7 @@ import ( const commentStart = "@" const commentStartLen = len(commentStart) -// Identifier is the type for an identifier in an Note argument list. +// Identifier is the type for an identifier in a Note argument list. type Identifier string // Parse collects all the notes present in a file. 
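The generated walk function above dispatches on every ast.Node kind and visits each child through a named edge (for example CallExpr_Fun, then each element of CallExpr_Args). As an illustration only, not the generated code itself, the following standalone sketch mirrors that dispatch for a few expression kinds using only the standard library, printing the parent -> edge -> child relationships that the real walker enumerates via its edge constants:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

// childEdge pairs a child node with the name of the field ("edge") through
// which its parent reaches it, loosely mirroring the edge constants above.
type childEdge struct {
	name  string
	child ast.Node
}

// childEdges handles just a few expression kinds, in the same field order as
// the corresponding cases of the generated walk function.
func childEdges(n ast.Node) []childEdge {
	switch n := n.(type) {
	case *ast.CallExpr:
		edges := []childEdge{{"CallExpr_Fun", n.Fun}}
		for _, arg := range n.Args {
			edges = append(edges, childEdge{"CallExpr_Args", arg})
		}
		return edges
	case *ast.BinaryExpr:
		return []childEdge{{"BinaryExpr_X", n.X}, {"BinaryExpr_Y", n.Y}}
	case *ast.KeyValueExpr:
		return []childEdge{{"KeyValueExpr_Key", n.Key}, {"KeyValueExpr_Value", n.Value}}
	}
	return nil
}

func main() {
	expr, err := parser.ParseExpr("f(a+b, c)")
	if err != nil {
		panic(err)
	}
	ast.Inspect(expr, func(n ast.Node) bool {
		for _, e := range childEdges(n) {
			fmt.Printf("%T --%s--> %T\n", n, e.name, e.child)
		}
		return true
	})
}
```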
diff --git a/go/internal/gccgoimporter/parser.go b/go/internal/gccgoimporter/parser.go index 7a021ebb4b2..f315ec41004 100644 --- a/go/internal/gccgoimporter/parser.go +++ b/go/internal/gccgoimporter/parser.go @@ -309,6 +309,7 @@ func (p *parser) parseParam(pkg *types.Package) (param *types.Var, isVariadic bo func (p *parser) parseVar(pkg *types.Package) *types.Var { name := p.parseName() v := types.NewVar(token.NoPos, pkg, name, p.parseType(pkg)) + typesinternal.SetVarKind(v, typesinternal.PackageVar) if name[0] == '.' || name[0] == '<' { // This is an unexported variable, // or a variable defined in a different package. diff --git a/go/loader/loader_test.go b/go/loader/loader_test.go index 4729ba34559..2276b49ad6f 100644 --- a/go/loader/loader_test.go +++ b/go/loader/loader_test.go @@ -558,7 +558,7 @@ func TestVendorCwdIssue16580(t *testing.T) { // - TypeCheckFuncBodies hook func TestTransitivelyErrorFreeFlag(t *testing.T) { - // Create an minimal custom build.Context + // Create a minimal custom build.Context // that fakes the following packages: // // a --> b --> c! c has an error diff --git a/go/packages/packages.go b/go/packages/packages.go index 0147d9080aa..c3a59b8ebf4 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -59,10 +59,10 @@ import ( // // Unfortunately there are a number of open bugs related to // interactions among the LoadMode bits: -// - https://github.com/golang/go/issues/56633 -// - https://github.com/golang/go/issues/56677 -// - https://github.com/golang/go/issues/58726 -// - https://github.com/golang/go/issues/63517 +// - https://go.dev/issue/56633 +// - https://go.dev/issue/56677 +// - https://go.dev/issue/58726 +// - https://go.dev/issue/63517 type LoadMode int const ( diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index fc420321c31..06fa488d1ed 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -3157,8 +3157,7 @@ func TestIssue69606b(t *testing.T) { // in another package (m/b) where the types for m/b are coming from the compiler, // e.g. `go list -compiled=true ... m/b`. func TestIssue70394(t *testing.T) { - // TODO(taking): backport https://go.dev/cl/604099 so that this works on 23. - testenv.NeedsGo1Point(t, 24) + testenv.NeedsGo1Point(t, 23) testenv.NeedsTool(t, "go") // requires go list. testenv.NeedsGoBuild(t) // requires the compiler for export data. 
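For context on the LoadMode doc comment touched above: the bits are combined with bitwise OR, and requesting only what a client needs keeps loads cheap while avoiding some of the interactions tracked in the linked issues. A minimal, illustrative use (the pattern and the particular bits chosen here are arbitrary, not taken from this change):

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	// Combine only the LoadMode bits this client actually needs.
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedTypes,
	}
	pkgs, err := packages.Load(cfg, "golang.org/x/tools/go/packages")
	if err != nil {
		log.Fatal(err)
	}
	for _, pkg := range pkgs {
		fmt.Println(pkg.PkgPath, len(pkg.GoFiles), "files")
	}
}
```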
@@ -3339,7 +3338,7 @@ func main() { pkgs, err := packages.Load(&packages.Config{ Mode: packages.NeedName | packages.NeedTarget, - Env: append(os.Environ(), "GOPATH=" + gopath, "GO111MODULE=off"), + Env: append(os.Environ(), "GOPATH="+gopath, "GO111MODULE=off"), }, filepath.Join(gopath, "src", "...")) if err != nil { t.Fatal(err) diff --git a/go/packages/packagestest/expect.go b/go/packages/packagestest/expect.go index 14a6446138f..dc41894a6ed 100644 --- a/go/packages/packagestest/expect.go +++ b/go/packages/packagestest/expect.go @@ -411,7 +411,7 @@ func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (Range, [] eof := tokFile.Pos(tokFile.Size()) return newRange(tokFile, eof, eof), args, nil default: - // look up an marker by name + // look up a marker by name mark, ok := e.markers[string(arg)] if !ok { return Range{}, nil, fmt.Errorf("cannot find marker %v", arg) diff --git a/go/ssa/builder.go b/go/ssa/builder.go index b109fbf3cd3..4cd71260b61 100644 --- a/go/ssa/builder.go +++ b/go/ssa/builder.go @@ -856,7 +856,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { if recv, ok := types.Unalias(sel.recv).(*types.TypeParam); ok { // Emit a nil check if any possible instantiation of the // type parameter is an interface type. - if typeSetOf(recv).Len() > 0 { + if !typeSetIsEmpty(recv) { // recv has a concrete term its typeset. // So it cannot be instantiated as an interface. // diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go index 59d8a91ea6a..2589cc82bb6 100644 --- a/go/ssa/builder_test.go +++ b/go/ssa/builder_test.go @@ -214,7 +214,7 @@ func TestRuntimeTypes(t *testing.T) { input string want []string }{ - // An package-level type is needed. + // A package-level type is needed. {`package A; type T struct{}; func (T) f() {}; var x any = T{}`, []string{"*p.T", "p.T"}, }, diff --git a/go/ssa/const.go b/go/ssa/const.go index 764b73529e3..91ed6f28647 100644 --- a/go/ssa/const.go +++ b/go/ssa/const.go @@ -45,7 +45,7 @@ func soleTypeKind(typ types.Type) types.BasicInfo { // Candidates (perhaps all) are eliminated during the type-set // iteration, which executes at least once. state := types.IsBoolean | types.IsInteger | types.IsString - underIs(typeSetOf(typ), func(ut types.Type) bool { + underIs(typ, func(ut types.Type) bool { var c types.BasicInfo if t, ok := ut.(*types.Basic); ok { c = t.Info() @@ -126,7 +126,7 @@ func (c *Const) IsNil() bool { // nillable reports whether *new(T) == nil is legal for type T. func nillable(t types.Type) bool { if typeparams.IsTypeParam(t) { - return underIs(typeSetOf(t), func(u types.Type) bool { + return underIs(t, func(u types.Type) bool { // empty type set (u==nil) => any underlying types => not nillable return u != nil && nillable(u) }) diff --git a/go/ssa/coretype_test.go b/go/ssa/coretype_test.go deleted file mode 100644 index 6fda54bf36a..00000000000 --- a/go/ssa/coretype_test.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package ssa_test - -import ( - "go/ast" - "go/parser" - "go/token" - "go/types" - "testing" - - "golang.org/x/tools/internal/typeparams" -) - -func TestCoreType(t *testing.T) { - const source = ` - package P - - type Named int - - type A any - type B interface{~int} - type C interface{int} - type D interface{Named} - type E interface{~int|interface{Named}} - type F interface{~int|~float32} - type G interface{chan int|interface{chan int}} - type H interface{chan int|chan float32} - type I interface{chan<- int|chan int} - type J interface{chan int|chan<- int} - type K interface{<-chan int|chan int} - type L interface{chan int|<-chan int} - type M interface{chan int|chan Named} - type N interface{<-chan int|chan<- int} - type O interface{chan int|bool} - type P struct{ Named } - type Q interface{ Foo() } - type R interface{ Foo() ; Named } - type S interface{ Foo() ; ~int } - - type T interface{chan int|interface{chan int}|<-chan int} -` - - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, "hello.go", source, 0) - if err != nil { - t.Fatal(err) - } - - var conf types.Config - pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) - if err != nil { - t.Fatal(err) - } - - for _, test := range []struct { - expr string // type expression of Named type - want string // expected core type (or "" if none) - }{ - {"Named", "int"}, // Underlying type is not interface. - {"A", ""}, // Interface has no terms. - {"B", "int"}, // Tilde term. - {"C", "int"}, // Non-tilde term. - {"D", "int"}, // Named term. - {"E", "int"}, // Identical underlying types. - {"F", ""}, // Differing underlying types. - {"G", "chan int"}, // Identical Element types. - {"H", ""}, // Element type int has differing underlying type to float32. - {"I", "chan<- int"}, // SendRecv followed by SendOnly - {"J", "chan<- int"}, // SendOnly followed by SendRecv - {"K", "<-chan int"}, // RecvOnly followed by SendRecv - {"L", "<-chan int"}, // SendRecv followed by RecvOnly - {"M", ""}, // Element type int is not *identical* to Named. - {"N", ""}, // Differing channel directions - {"O", ""}, // A channel followed by a non-channel. - {"P", "struct{P.Named}"}, // Embedded type. - {"Q", ""}, // interface type with no terms and functions - {"R", "int"}, // interface type with both terms and functions. - {"S", "int"}, // interface type with a tilde term - {"T", "<-chan int"}, // Prefix of 2 terms that are identical before switching to channel. - } { - // Eval() expr for its type. - tv, err := types.Eval(fset, pkg, 0, test.expr) - if err != nil { - t.Fatalf("Eval(%s) failed: %v", test.expr, err) - } - - ct := typeparams.CoreType(tv.Type) - var got string - if ct == nil { - got = "" - } else { - got = ct.String() - } - if got != test.want { - t.Errorf("CoreType(%s) = %v, want %v", test.expr, got, test.want) - } - } -} diff --git a/go/ssa/emit.go b/go/ssa/emit.go index 176c1e1a748..a3d41ad95a4 100644 --- a/go/ssa/emit.go +++ b/go/ssa/emit.go @@ -18,7 +18,7 @@ import ( // emitAlloc emits to f a new Alloc instruction allocating a variable // of type typ. // -// The caller must set Alloc.Heap=true (for an heap-allocated variable) +// The caller must set Alloc.Heap=true (for a heap-allocated variable) // or add the Alloc to f.Locals (for a frame-allocated variable). 
// // During building, a variable in f.Locals may have its Heap flag @@ -257,13 +257,6 @@ func emitConv(f *Function, val Value, typ types.Type) Value { return f.emit(mi) } - // In the common case, the typesets of src and dst are singletons - // and we emit an appropriate conversion. But if either contains - // a type parameter, the conversion may represent a cross product, - // in which case which we emit a MultiConvert. - dst_terms := typeSetOf(ut_dst) - src_terms := typeSetOf(ut_src) - // conversionCase describes an instruction pattern that maybe emitted to // model d <- s for d in dst_terms and s in src_terms. // Multiple conversions can match the same pattern. @@ -321,13 +314,14 @@ func emitConv(f *Function, val Value, typ types.Type) Value { } var classifications conversionCase - for _, s := range src_terms { - us := s.Type().Underlying() - for _, d := range dst_terms { - ud := d.Type().Underlying() - classifications |= classify(us, ud) - } - } + underIs(ut_src, func(us types.Type) bool { + return underIs(ut_dst, func(ud types.Type) bool { + if us != nil && ud != nil { + classifications |= classify(us, ud) + } + return classifications != 0 + }) + }) if classifications == 0 { panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) } @@ -381,8 +375,8 @@ func emitConv(f *Function, val Value, typ types.Type) Value { c.setType(typ) return f.emit(c) - default: // multiple conversion - c := &MultiConvert{X: val, from: src_terms, to: dst_terms} + default: // The conversion represents a cross product. + c := &MultiConvert{X: val, from: t_src, to: typ} c.setType(typ) return f.emit(c) } diff --git a/go/ssa/interp/reflect.go b/go/ssa/interp/reflect.go index 3143c077790..8259e56d860 100644 --- a/go/ssa/interp/reflect.go +++ b/go/ssa/interp/reflect.go @@ -510,7 +510,7 @@ func newMethod(pkg *ssa.Package, recvType types.Type, name string) *ssa.Function // that is needed is the "pointerness" of Recv.Type, and for // now, we'll set it to always be false since we're only // concerned with rtype. Encapsulate this better. 
- sig := types.NewSignature(types.NewVar(token.NoPos, nil, "recv", recvType), nil, nil, false) + sig := types.NewSignature(types.NewParam(token.NoPos, nil, "recv", recvType), nil, nil, false) fn := pkg.Prog.NewFunction(name, sig, "fake reflect method") fn.Pkg = pkg return fn diff --git a/go/ssa/print.go b/go/ssa/print.go index ef32672a26a..432c4b05b6d 100644 --- a/go/ssa/print.go +++ b/go/ssa/print.go @@ -180,8 +180,8 @@ func (v *MultiConvert) String() string { var b strings.Builder b.WriteString(printConv("multiconvert", v, v.X)) b.WriteString(" [") - for i, s := range v.from { - for j, d := range v.to { + for i, s := range termListOf(v.from) { + for j, d := range termListOf(v.to) { if i != 0 || j != 0 { b.WriteString(" | ") } diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go index ef2928e3b74..e35e4d79357 100644 --- a/go/ssa/sanity.go +++ b/go/ssa/sanity.go @@ -142,8 +142,8 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *ChangeType: case *SliceToArrayPointer: case *Convert: - if from := instr.X.Type(); !isBasicConvTypes(typeSetOf(from)) { - if to := instr.Type(); !isBasicConvTypes(typeSetOf(to)) { + if from := instr.X.Type(); !isBasicConvTypes(from) { + if to := instr.Type(); !isBasicConvTypes(to) { s.errorf("convert %s -> %s: at least one type must be basic (or all basic, []byte, or []rune)", from, to) } } diff --git a/go/ssa/source.go b/go/ssa/source.go index 7b71c88d120..055a6b1ef5f 100644 --- a/go/ssa/source.go +++ b/go/ssa/source.go @@ -191,7 +191,7 @@ func (prog *Program) packageLevelMember(obj types.Object) Member { } // FuncValue returns the SSA function or (non-interface) method -// denoted by the specified func symbol. It returns nil id the symbol +// denoted by the specified func symbol. It returns nil if the symbol // denotes an interface method, or belongs to a package that was not // created by prog.CreatePackage. func (prog *Program) FuncValue(obj *types.Func) *Function { diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go index 4fa9831079c..ecad99d0340 100644 --- a/go/ssa/ssa.go +++ b/go/ssa/ssa.go @@ -719,9 +719,8 @@ type Convert struct { // t1 = multiconvert D <- S (t0) [*[2]rune <- []rune | string <- []rune] type MultiConvert struct { register - X Value - from []*types.Term - to []*types.Term + X Value + from, to types.Type } // ChangeInterface constructs a value of one interface type from a diff --git a/go/ssa/subst.go b/go/ssa/subst.go index fc870235c42..bbe5796d703 100644 --- a/go/ssa/subst.go +++ b/go/ssa/subst.go @@ -227,7 +227,7 @@ func (subst *subster) var_(v *types.Var) *types.Var { if v.IsField() { return types.NewField(v.Pos(), v.Pkg(), v.Name(), typ, v.Embedded()) } - return types.NewVar(v.Pos(), v.Pkg(), v.Name(), typ) + return types.NewParam(v.Pos(), v.Pkg(), v.Name(), typ) } } return v diff --git a/go/ssa/coretype.go b/go/ssa/typeset.go similarity index 51% rename from go/ssa/coretype.go rename to go/ssa/typeset.go index d937134227d..d0106dc6874 100644 --- a/go/ssa/coretype.go +++ b/go/ssa/typeset.go @@ -10,7 +10,60 @@ import ( "golang.org/x/tools/internal/typeparams" ) -// Utilities for dealing with core types. +// Utilities for dealing with type sets. + +const debug = false + +// typeset is an iterator over the (type/underlying type) pairs of the +// specific type terms of the type set implied by t. +// If t is a type parameter, the implied type set is the type set of t's constraint. +// In that case, if there are no specific terms, typeset calls yield with (nil, nil). +// If t is not a type parameter, the implied type set consists of just t. 
+// In any case, typeset is guaranteed to call yield at least once. +func typeset(typ types.Type, yield func(t, u types.Type) bool) { + switch typ := types.Unalias(typ).(type) { + case *types.TypeParam, *types.Interface: + terms := termListOf(typ) + if len(terms) == 0 { + yield(nil, nil) + return + } + for _, term := range terms { + u := types.Unalias(term.Type()) + if !term.Tilde() { + u = u.Underlying() + } + if debug { + assert(types.Identical(u, u.Underlying()), "Unalias(x) == under(x) for ~x terms") + } + if !yield(term.Type(), u) { + break + } + } + return + default: + yield(typ, typ.Underlying()) + } +} + +// termListOf returns the type set of typ as a normalized term set. Returns an empty set on an error. +func termListOf(typ types.Type) []*types.Term { + terms, err := typeparams.NormalTerms(typ) + if err != nil { + return nil + } + return terms +} + +// typeSetIsEmpty returns true if a typeset is empty. +func typeSetIsEmpty(typ types.Type) bool { + var empty bool + typeset(typ, func(t, _ types.Type) bool { + empty = t == nil + return false + }) + return empty +} // isBytestring returns true if T has the same terms as interface{[]byte | string}. // These act like a core type for some operations: slice expressions, append and copy. @@ -22,72 +75,36 @@ func isBytestring(T types.Type) bool { return false } - tset := typeSetOf(U) - if tset.Len() != 2 { - return false - } hasBytes, hasString := false, false - underIs(tset, func(t types.Type) bool { + ok := underIs(U, func(t types.Type) bool { switch { case isString(t): hasString = true + return true case isByteSlice(t): hasBytes = true + return true + default: + return false } - return hasBytes || hasString }) - return hasBytes && hasString + return ok && hasBytes && hasString } -// termList is a list of types. -type termList []*types.Term // type terms of the type set -func (s termList) Len() int { return len(s) } -func (s termList) At(i int) types.Type { return s[i].Type() } - -// typeSetOf returns the type set of typ. Returns an empty typeset on an error. -func typeSetOf(typ types.Type) termList { - // This is a adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on. - var terms []*types.Term - var err error - // typeSetOf(t) == typeSetOf(Unalias(t)) - switch typ := types.Unalias(typ).(type) { - case *types.TypeParam: - terms, err = typeparams.StructuralTerms(typ) - case *types.Union: - terms, err = typeparams.UnionTermSet(typ) - case *types.Interface: - terms, err = typeparams.InterfaceTermSet(typ) - default: - // Common case. - // Specializing the len=1 case to avoid a slice - // had no measurable space/time benefit. - terms = []*types.Term{types.NewTerm(false, typ)} - } - - if err != nil { - return termList(nil) - } - return termList(terms) -} - -// underIs calls f with the underlying types of the specific type terms -// of s and reports whether all calls to f returned true. If there are -// no specific terms, underIs returns the result of f(nil). -func underIs(s termList, f func(types.Type) bool) bool { - if s.Len() == 0 { - return f(nil) - } - for i := 0; i < s.Len(); i++ { - u := s.At(i).Underlying() - if !f(u) { - return false - } - } - return true +// underIs calls f with the underlying types of the type terms +// of the type set of typ and reports whether all calls to f returned true. +// If there are no specific terms, underIs returns the result of f(nil). 
+func underIs(typ types.Type, f func(types.Type) bool) bool { + var ok bool + typeset(typ, func(t, u types.Type) bool { + ok = f(u) + return ok + }) + return ok } // indexType returns the element type and index mode of a IndexExpr over a type. -// It returns (nil, invalid) if the type is not indexable; this should never occur in a well-typed program. +// It returns an invalid mode if the type is not indexable; this should never occur in a well-typed program. func indexType(typ types.Type) (types.Type, indexMode) { switch U := typ.Underlying().(type) { case *types.Array: @@ -103,23 +120,25 @@ func indexType(typ types.Type) (types.Type, indexMode) { case *types.Basic: return tByte, ixValue // must be a string case *types.Interface: - tset := typeSetOf(U) - if tset.Len() == 0 { - return nil, ixInvalid // no underlying terms or error is empty. - } - - elem, mode := indexType(tset.At(0)) - for i := 1; i < tset.Len() && mode != ixInvalid; i++ { - e, m := indexType(tset.At(i)) - if !types.Identical(elem, e) { // if type checked, just a sanity check - return nil, ixInvalid + var elem types.Type + mode := ixInvalid + typeset(typ, func(t, _ types.Type) bool { + if t == nil { + return false // empty set + } + e, m := indexType(t) + if elem == nil { + elem, mode = e, m + } + if debug && !types.Identical(elem, e) { // if type checked, just a sanity check + mode = ixInvalid + return false } // Update the mode to the most constrained address type. mode = mode.meet(m) - } - if mode != ixInvalid { - return elem, mode - } + return mode != ixInvalid + }) + return elem, mode } return nil, ixInvalid } diff --git a/go/ssa/util.go b/go/ssa/util.go index aa070eacdcb..4a056cbe0bd 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ -85,21 +85,22 @@ func isRuneSlice(t types.Type) bool { return false } -// isBasicConvTypes returns true when a type set can be -// one side of a Convert operation. This is when: +// isBasicConvTypes returns true when the type set of a type +// can be one side of a Convert operation. This is when: // - All are basic, []byte, or []rune. // - At least 1 is basic. // - At most 1 is []byte or []rune. -func isBasicConvTypes(tset termList) bool { - basics := 0 - all := underIs(tset, func(t types.Type) bool { +func isBasicConvTypes(typ types.Type) bool { + basics, cnt := 0, 0 + ok := underIs(typ, func(t types.Type) bool { + cnt++ if isBasic(t) { basics++ return true } return isByteSlice(t) || isRuneSlice(t) }) - return all && basics >= 1 && tset.Len()-basics <= 1 + return ok && basics >= 1 && cnt-basics <= 1 } // isPointer reports whether t's underlying type is a pointer. diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go index 93b3090c687..43261147c05 100644 --- a/go/types/typeutil/map.go +++ b/go/types/typeutil/map.go @@ -257,10 +257,13 @@ func (h hasher) hash(t types.Type) uint32 { } tparams := t.TypeParams() - for i := range tparams.Len() { - h.inGenericSig = true - tparam := tparams.At(i) - hash += 7 * h.hash(tparam.Constraint()) + if n := tparams.Len(); n > 0 { + h.inGenericSig = true // affects constraints, params, and results + + for i := range n { + tparam := tparams.At(i) + hash += 7 * h.hash(tparam.Constraint()) + } } return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) diff --git a/godoc/analysis/analysis.go b/godoc/analysis/analysis.go index 54611e87d96..54d692a59ec 100644 --- a/godoc/analysis/analysis.go +++ b/godoc/analysis/analysis.go @@ -62,15 +62,15 @@ type Link interface { // FileInfo holds analysis information for the source file view. 
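To make the typeset/underIs helpers introduced above more concrete, here is a simplified, self-contained sketch of the same idea using only go/types: for a union constraint it calls f with the underlying type of each term and reports whether every call returned true. Unlike the real helper, it does not normalize terms or handle embedded interfaces and type parameters; it is an illustration, not the x/tools implementation.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
type C interface{ ~int | string }
`

// underIs is a simplified analogue of the helper in this change: it visits
// the underlying type of each specific term of a simple union constraint.
func underIs(u *types.Union, f func(types.Type) bool) bool {
	for i := 0; i < u.Len(); i++ {
		if !f(u.Term(i).Type().Underlying()) {
			return false
		}
	}
	return true
}

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	iface := pkg.Scope().Lookup("C").Type().Underlying().(*types.Interface)
	union := iface.EmbeddedType(0).(*types.Union)
	allBasic := underIs(union, func(t types.Type) bool {
		_, ok := t.(*types.Basic)
		return ok
	})
	fmt.Println("all terms have basic underlying types:", allBasic) // true
}
```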
// Clients must not mutate it. type FileInfo struct { - Data []interface{} // JSON serializable values - Links []Link // HTML link markup + Data []any // JSON serializable values + Links []Link // HTML link markup } // A fileInfo is the server's store of hyperlinks and JSON data for a // particular file. type fileInfo struct { mu sync.Mutex - data []interface{} // JSON objects + data []any // JSON objects links []Link sorted bool hasErrors bool // TODO(adonovan): surface this in the UI diff --git a/godoc/godoc.go b/godoc/godoc.go index a9d806f7e8b..ac6ab23a0a1 100644 --- a/godoc/godoc.go +++ b/godoc/godoc.go @@ -190,13 +190,13 @@ func (p *Presentation) infoSnippet_htmlFunc(info SpotInfo) string { return `no snippet text available` } -func (p *Presentation) nodeFunc(info *PageInfo, node interface{}) string { +func (p *Presentation) nodeFunc(info *PageInfo, node any) string { var buf bytes.Buffer p.writeNode(&buf, info, info.FSet, node) return buf.String() } -func (p *Presentation) node_htmlFunc(info *PageInfo, node interface{}, linkify bool) string { +func (p *Presentation) node_htmlFunc(info *PageInfo, node any, linkify bool) string { var buf1 bytes.Buffer p.writeNode(&buf1, info, info.FSet, node) @@ -477,9 +477,9 @@ func srcBreadcrumbFunc(relpath string) string { return buf.String() } -func newPosLink_urlFunc(srcPosLinkFunc func(s string, line, low, high int) string) func(info *PageInfo, n interface{}) string { +func newPosLink_urlFunc(srcPosLinkFunc func(s string, line, low, high int) string) func(info *PageInfo, n any) string { // n must be an ast.Node or a *doc.Note - return func(info *PageInfo, n interface{}) string { + return func(info *PageInfo, n any) string { var pos, end token.Pos switch n := n.(type) { @@ -839,7 +839,7 @@ func replaceLeadingIndentation(body, oldIndent, newIndent string) string { // The provided fset must be non-nil. The pageInfo is optional. If // present, the pageInfo is used to add comments to struct fields to // say which version of Go introduced them. -func (p *Presentation) writeNode(w io.Writer, pageInfo *PageInfo, fset *token.FileSet, x interface{}) { +func (p *Presentation) writeNode(w io.Writer, pageInfo *PageInfo, fset *token.FileSet, x any) { // convert trailing tabs into spaces using a tconv filter // to ensure a good outcome in most browsers (there may still // be tabs in comments and strings, but converting those into @@ -918,7 +918,7 @@ var slashSlash = []byte("//") // WriteNode writes x to w. // TODO(bgarcia) Is this method needed? It's just a wrapper for p.writeNode. -func (p *Presentation) WriteNode(w io.Writer, fset *token.FileSet, x interface{}) { +func (p *Presentation) WriteNode(w io.Writer, fset *token.FileSet, x any) { p.writeNode(w, nil, fset, x) } diff --git a/godoc/index.go b/godoc/index.go index 377837a0b36..05a1a9441ee 100644 --- a/godoc/index.go +++ b/godoc/index.go @@ -71,10 +71,10 @@ import ( // InterfaceSlice is a helper type for sorting interface // slices according to some slice-specific sort criteria. -type comparer func(x, y interface{}) bool +type comparer func(x, y any) bool type interfaceSlice struct { - slice []interface{} + slice []any less comparer } @@ -87,7 +87,7 @@ type interfaceSlice struct { // runs. For instance, a RunList containing pairs (x, y) may be compressed // into a RunList containing pair runs (x, {y}) where each run consists of // a list of y's with the same x. 
-type RunList []interface{} +type RunList []any func (h RunList) sort(less comparer) { sort.Sort(&interfaceSlice{h, less}) @@ -99,7 +99,7 @@ func (p *interfaceSlice) Swap(i, j int) { p.slice[i], p.slice[j] = p.slice[ // Compress entries which are the same according to a sort criteria // (specified by less) into "runs". -func (h RunList) reduce(less comparer, newRun func(h RunList) interface{}) RunList { +func (h RunList) reduce(less comparer, newRun func(h RunList) any) RunList { if len(h) == 0 { return nil } @@ -143,10 +143,10 @@ func (k KindRun) Less(i, j int) bool { return k[i].Lori() < k[j].Lori() } func (k KindRun) Swap(i, j int) { k[i], k[j] = k[j], k[i] } // FileRun contents are sorted by Kind for the reduction into KindRuns. -func lessKind(x, y interface{}) bool { return x.(SpotInfo).Kind() < y.(SpotInfo).Kind() } +func lessKind(x, y any) bool { return x.(SpotInfo).Kind() < y.(SpotInfo).Kind() } // newKindRun allocates a new KindRun from the SpotInfo run h. -func newKindRun(h RunList) interface{} { +func newKindRun(h RunList) any { run := make(KindRun, len(h)) for i, x := range h { run[i] = x.(SpotInfo) @@ -214,7 +214,7 @@ type FileRun struct { } // Spots are sorted by file path for the reduction into FileRuns. -func lessSpot(x, y interface{}) bool { +func lessSpot(x, y any) bool { fx := x.(Spot).File fy := y.(Spot).File // same as "return fx.Path() < fy.Path()" but w/o computing the file path first @@ -224,7 +224,7 @@ func lessSpot(x, y interface{}) bool { } // newFileRun allocates a new FileRun from the Spot run h. -func newFileRun(h RunList) interface{} { +func newFileRun(h RunList) any { file := h[0].(Spot).File // reduce the list of Spots into a list of KindRuns @@ -257,12 +257,12 @@ func (p *PakRun) Less(i, j int) bool { return p.Files[i].File.Name < p.Files[j]. func (p *PakRun) Swap(i, j int) { p.Files[i], p.Files[j] = p.Files[j], p.Files[i] } // FileRuns are sorted by package for the reduction into PakRuns. -func lessFileRun(x, y interface{}) bool { +func lessFileRun(x, y any) bool { return x.(*FileRun).File.Pak.less(y.(*FileRun).File.Pak) } // newPakRun allocates a new PakRun from the *FileRun run h. -func newPakRun(h RunList) interface{} { +func newPakRun(h RunList) any { pak := h[0].(*FileRun).File.Pak files := make([]*FileRun, len(h)) for i, x := range h { @@ -280,7 +280,7 @@ func newPakRun(h RunList) interface{} { type HitList []*PakRun // PakRuns are sorted by package. -func lessPakRun(x, y interface{}) bool { return x.(*PakRun).Pak.less(y.(*PakRun).Pak) } +func lessPakRun(x, y any) bool { return x.(*PakRun).Pak.less(y.(*PakRun).Pak) } func reduce(h0 RunList) HitList { // reduce a list of Spots into a list of FileRuns @@ -325,10 +325,10 @@ type AltWords struct { } // wordPairs are sorted by their canonical spelling. -func lessWordPair(x, y interface{}) bool { return x.(*wordPair).canon < y.(*wordPair).canon } +func lessWordPair(x, y any) bool { return x.(*wordPair).canon < y.(*wordPair).canon } // newAltWords allocates a new AltWords from the *wordPair run h. 
-func newAltWords(h RunList) interface{} { +func newAltWords(h RunList) any { canon := h[0].(*wordPair).canon alts := make([]string, len(h)) for i, x := range h { @@ -1159,7 +1159,7 @@ func (x *Index) WriteTo(w io.Writer) (n int64, err error) { return 0, err } if fulltext { - encode := func(x interface{}) error { + encode := func(x any) error { return gob.NewEncoder(w).Encode(x) } if err := x.fset.Write(encode); err != nil { @@ -1199,7 +1199,7 @@ func (x *Index) ReadFrom(r io.Reader) (n int64, err error) { x.opts = fx.Opts if fx.Fulltext { x.fset = token.NewFileSet() - decode := func(x interface{}) error { + decode := func(x any) error { return gob.NewDecoder(r).Decode(x) } if err := x.fset.Read(decode); err != nil { diff --git a/godoc/search.go b/godoc/search.go index 33e4febfaaa..a0afb8bf97b 100644 --- a/godoc/search.go +++ b/godoc/search.go @@ -36,7 +36,7 @@ func (c *Corpus) Lookup(query string) SearchResult { // identifier search if r, err := index.Lookup(query); err == nil { result = r - } else if err != nil && !c.IndexFullText { + } else if !c.IndexFullText { // ignore the error if full text search is enabled // since the query may be a valid regular expression result.Alert = "Error in query string: " + err.Error() @@ -127,7 +127,7 @@ func (p *Presentation) HandleSearch(w http.ResponseWriter, r *http.Request) { func (p *Presentation) serveSearchDesc(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/opensearchdescription+xml") - data := map[string]interface{}{ + data := map[string]any{ "BaseURL": fmt.Sprintf("http://%s", r.Host), } applyTemplateToResponseWriter(w, p.SearchDescXML, &data) diff --git a/godoc/server.go b/godoc/server.go index afb28e2e187..92d1ec48d61 100644 --- a/godoc/server.go +++ b/godoc/server.go @@ -502,7 +502,7 @@ func packageExports(fset *token.FileSet, pkg *ast.Package) { } } -func applyTemplate(t *template.Template, name string, data interface{}) []byte { +func applyTemplate(t *template.Template, name string, data any) []byte { var buf bytes.Buffer if err := t.Execute(&buf, data); err != nil { log.Printf("%s.Execute: %s", name, err) @@ -529,7 +529,7 @@ func (w *writerCapturesErr) Write(p []byte) (int, error) { // they come from the template processing and not the Writer; this avoid // polluting log files with error messages due to networking issues, such as // client disconnects and http HEAD protocol violations. -func applyTemplateToResponseWriter(rw http.ResponseWriter, t *template.Template, data interface{}) { +func applyTemplateToResponseWriter(rw http.ResponseWriter, t *template.Template, data any) { w := &writerCapturesErr{w: rw} err := t.Execute(w, data) // There are some cases where template.Execute does not return an error when @@ -839,7 +839,7 @@ func (p *Presentation) ServeText(w http.ResponseWriter, text []byte) { w.Write(text) } -func marshalJSON(x interface{}) []byte { +func marshalJSON(x any) []byte { var data []byte var err error const indentJSON = false // for easier debugging diff --git a/godoc/spec.go b/godoc/spec.go index 9ec94278db5..c8142363e9b 100644 --- a/godoc/spec.go +++ b/godoc/spec.go @@ -38,7 +38,7 @@ func (p *ebnfParser) next() { p.lit = p.scanner.TokenText() } -func (p *ebnfParser) printf(format string, args ...interface{}) { +func (p *ebnfParser) printf(format string, args ...any) { p.flush() fmt.Fprintf(p.out, format, args...) 
} diff --git a/godoc/template.go b/godoc/template.go index 1e4e42e30e5..4418bea09b5 100644 --- a/godoc/template.go +++ b/godoc/template.go @@ -55,7 +55,7 @@ func (c *Corpus) contents(name string) string { } // stringFor returns a textual representation of the arg, formatted according to its nature. -func stringFor(arg interface{}) string { +func stringFor(arg any) string { switch arg := arg.(type) { case int: return fmt.Sprintf("%d", arg) @@ -70,7 +70,7 @@ func stringFor(arg interface{}) string { return "" } -func (p *Presentation) code(file string, arg ...interface{}) (s string, err error) { +func (p *Presentation) code(file string, arg ...any) (s string, err error) { defer func() { if r := recover(); r != nil { err = fmt.Errorf("%v", r) @@ -85,7 +85,7 @@ func (p *Presentation) code(file string, arg ...interface{}) (s string, err erro command = fmt.Sprintf("code %q", file) case 1: command = fmt.Sprintf("code %q %s", file, stringFor(arg[0])) - text = p.Corpus.oneLine(file, text, arg[0]) + text = p.Corpus.oneLine(file, arg[0]) case 2: command = fmt.Sprintf("code %q %s %s", file, stringFor(arg[0]), stringFor(arg[1])) text = p.Corpus.multipleLines(file, text, arg[0], arg[1]) @@ -105,7 +105,7 @@ func (p *Presentation) code(file string, arg ...interface{}) (s string, err erro } // parseArg returns the integer or string value of the argument and tells which it is. -func parseArg(arg interface{}, file string, max int) (ival int, sval string, isInt bool) { +func parseArg(arg any, file string, max int) (ival int, sval string, isInt bool) { switch n := arg.(type) { case int: if n <= 0 || n > max { @@ -120,7 +120,7 @@ func parseArg(arg interface{}, file string, max int) (ival int, sval string, isI } // oneLine returns the single line generated by a two-argument code invocation. -func (c *Corpus) oneLine(file, text string, arg interface{}) string { +func (c *Corpus) oneLine(file string, arg any) string { lines := strings.SplitAfter(c.contents(file), "\n") line, pattern, isInt := parseArg(arg, file, len(lines)) if isInt { @@ -130,7 +130,7 @@ func (c *Corpus) oneLine(file, text string, arg interface{}) string { } // multipleLines returns the text generated by a three-argument code invocation. -func (c *Corpus) multipleLines(file, text string, arg1, arg2 interface{}) string { +func (c *Corpus) multipleLines(file, text string, arg1, arg2 any) string { lines := strings.SplitAfter(c.contents(file), "\n") line1, pattern1, isInt1 := parseArg(arg1, file, len(lines)) line2, pattern2, isInt2 := parseArg(arg2, file, len(lines)) diff --git a/godoc/util/util.go b/godoc/util/util.go index c08ca785fed..21390556e7f 100644 --- a/godoc/util/util.go +++ b/godoc/util/util.go @@ -18,18 +18,18 @@ import ( // access to it and records the time the value was last set. 
type RWValue struct { mutex sync.RWMutex - value interface{} + value any timestamp time.Time // time of last set() } -func (v *RWValue) Set(value interface{}) { +func (v *RWValue) Set(value any) { v.mutex.Lock() v.value = value v.timestamp = time.Now() v.mutex.Unlock() } -func (v *RWValue) Get() (interface{}, time.Time) { +func (v *RWValue) Get() (any, time.Time) { v.mutex.RLock() defer v.mutex.RUnlock() return v.value, v.timestamp diff --git a/godoc/vfs/emptyvfs.go b/godoc/vfs/emptyvfs.go index 521bf71a51b..4ab5c7c649e 100644 --- a/godoc/vfs/emptyvfs.go +++ b/godoc/vfs/emptyvfs.go @@ -84,6 +84,6 @@ func (e *emptyVFS) IsDir() bool { return true } -func (e *emptyVFS) Sys() interface{} { +func (e *emptyVFS) Sys() any { return nil } diff --git a/godoc/vfs/mapfs/mapfs.go b/godoc/vfs/mapfs/mapfs.go index 9d0f465eb5e..06fb4f09543 100644 --- a/godoc/vfs/mapfs/mapfs.go +++ b/godoc/vfs/mapfs/mapfs.go @@ -158,9 +158,9 @@ func (fi mapFI) Mode() os.FileMode { } return 0444 } -func (fi mapFI) Name() string { return pathpkg.Base(fi.name) } -func (fi mapFI) Size() int64 { return int64(fi.size) } -func (fi mapFI) Sys() interface{} { return nil } +func (fi mapFI) Name() string { return pathpkg.Base(fi.name) } +func (fi mapFI) Size() int64 { return int64(fi.size) } +func (fi mapFI) Sys() any { return nil } type nopCloser struct { io.ReadSeeker diff --git a/godoc/vfs/namespace.go b/godoc/vfs/namespace.go index 23dd9794312..2566051a293 100644 --- a/godoc/vfs/namespace.go +++ b/godoc/vfs/namespace.go @@ -275,7 +275,7 @@ func (d dirInfo) Size() int64 { return 0 } func (d dirInfo) Mode() os.FileMode { return os.ModeDir | 0555 } func (d dirInfo) ModTime() time.Time { return startTime } func (d dirInfo) IsDir() bool { return true } -func (d dirInfo) Sys() interface{} { return nil } +func (d dirInfo) Sys() any { return nil } var startTime = time.Now() diff --git a/godoc/vfs/zipfs/zipfs.go b/godoc/vfs/zipfs/zipfs.go index 14c9820a1c7..cdf231a1abd 100644 --- a/godoc/vfs/zipfs/zipfs.go +++ b/godoc/vfs/zipfs/zipfs.go @@ -68,7 +68,7 @@ func (fi zipFI) IsDir() bool { return fi.file == nil } -func (fi zipFI) Sys() interface{} { +func (fi zipFI) Sys() any { return nil } diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 2905a0e5336..68465f9809d 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -290,6 +290,41 @@ Default: on. Package documentation: [framepointer](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer) + +## `gofix`: apply fixes based on go:fix comment directives + + +The gofix analyzer inlines functions and constants that are marked for inlining. + +Default: on. + +Package documentation: [gofix](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix) + + +## `hostport`: check format of addresses passed to net.Dial + + +This analyzer flags code that produce network address strings using +fmt.Sprintf, as in this example: + + addr := fmt.Sprintf("%s:%d", host, 12345) // "will not work with IPv6" + ... + conn, err := net.Dial("tcp", addr) // "when passed to dial here" + +The analyzer suggests a fix to use the correct approach, a call to +net.JoinHostPort: + + addr := net.JoinHostPort(host, "12345") + ... + conn, err := net.Dial("tcp", addr) + +A similar diagnostic and fix are produced for a format string of "%s:%s". + + +Default: on. 
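When the port is held in an integer variable rather than written as a literal, the same approach applies with an explicit conversion. This is an illustrative usage of the standard library, not analyzer output:

```go
package main

import (
	"fmt"
	"net"
	"strconv"
)

func main() {
	host, port := "2001:db8::1", 8080

	// Unlike fmt.Sprintf("%s:%d", host, port), JoinHostPort brackets IPv6
	// literals, so the result is safe to pass to net.Dial.
	addr := net.JoinHostPort(host, strconv.Itoa(port))
	fmt.Println(addr) // [2001:db8::1]:8080
}
```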
+ +Package documentation: [hostport](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport) + ## `httpresponse`: check for mistakes using HTTP responses @@ -455,6 +490,15 @@ existing code by using more modern features of Go, such as: from the maps package, added in go1.21; - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), added in go1.19; + - replacing uses of context.WithCancel in tests with t.Context, added in + go1.24; + - replacing omitempty by omitzero on structs, added in go1.24; + - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1), + added in go1.21 + - replacing a 3-clause for i := 0; i < n; i++ {} loop by + for i := range n {}, added in go1.22; + - replacing Split in "for range strings.Split(...)" by go1.24's + more efficient SplitSeq; Default: on. @@ -882,7 +926,7 @@ Package documentation: [unmarshal](https://pkg.go.dev/golang.org/x/tools/go/anal The unreachable analyzer finds statements that execution can never reach -because they are preceded by an return statement, a call to panic, an +because they are preceded by a return statement, a call to panic, an infinite loop, or similar constructs. Default: on. @@ -961,6 +1005,8 @@ arguments at call sites, while taking care to preserve any side effects in the argument expressions; see https://github.com/golang/tools/releases/tag/gopls%2Fv0.14. +This analyzer ignores generated code. + Default: on. Package documentation: [unusedparams](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams) @@ -986,7 +1032,7 @@ Package documentation: [unusedresult](https://pkg.go.dev/golang.org/x/tools/go/a -Default: off. Enable by setting `"analyses": {"unusedvariable": true}`. +Default: on. Package documentation: [unusedvariable](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable) diff --git a/gopls/doc/features/diagnostics.md b/gopls/doc/features/diagnostics.md index 09b3cc33e90..ceec607c123 100644 --- a/gopls/doc/features/diagnostics.md +++ b/gopls/doc/features/diagnostics.md @@ -65,9 +65,13 @@ There is an optional third source of diagnostics: This source is disabled by default but can be enabled on a package-by-package basis by invoking the - `source.toggleCompilerOptDetails` ("Toggle compiler optimization + `source.toggleCompilerOptDetails` ("{Show,Hide} compiler optimization details") code action. + Remember that the compiler's optimizer runs only on packages that + are transitively free from errors, so optimization diagnostics + will not be shown on packages that do not build. + ## Recomputation of diagnostics diff --git a/gopls/doc/generate/generate.go b/gopls/doc/generate/generate.go index 7d92b2629d5..b0d3e8c49f6 100644 --- a/gopls/doc/generate/generate.go +++ b/gopls/doc/generate/generate.go @@ -44,6 +44,7 @@ import ( "golang.org/x/tools/gopls/internal/mod" "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/util/safetoken" + internalastutil "golang.org/x/tools/internal/astutil" ) func main() { @@ -221,11 +222,13 @@ func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Pa if len(path) < 2 { return nil, fmt.Errorf("could not find AST node for field %v", typesField) } + // The AST field gives us the doc. astField, ok := path[1].(*ast.Field) if !ok { return nil, fmt.Errorf("unexpected AST path %v", path) } + description, deprecation := astField.Doc.Text(), internalastutil.Deprecation(astField.Doc) // The reflect field gives us the default value. 
reflectField := category.FieldByName(typesField.Name()) @@ -285,14 +288,15 @@ func loadOptions(category reflect.Value, optsType types.Object, pkg *packages.Pa status := reflectStructField.Tag.Get("status") opts = append(opts, &doc.Option{ - Name: name, - Type: typ, - Doc: lowerFirst(astField.Doc.Text()), - Default: def, - EnumKeys: enumKeys, - EnumValues: enums[typesField.Type()], - Status: status, - Hierarchy: hierarchy, + Name: name, + Type: typ, + Doc: lowerFirst(description), + Default: def, + EnumKeys: enumKeys, + EnumValues: enums[typesField.Type()], + Status: status, + Hierarchy: hierarchy, + DeprecationMessage: lowerFirst(strings.TrimPrefix(deprecation, "Deprecated: ")), }) } return opts, nil @@ -410,7 +414,7 @@ func formatDefault(reflectField reflect.Value) (string, error) { return string(defBytes), err } -// valueDoc transforms a docstring documenting an constant identifier to a +// valueDoc transforms a docstring documenting a constant identifier to a // docstring documenting its value. // // If doc is of the form "Foo is a bar", it returns '`"fooValue"` is a bar'. If diff --git a/gopls/doc/release/v0.18.0.md b/gopls/doc/release/v0.18.0.md index 9f7ddd0909b..8d641a2104f 100644 --- a/gopls/doc/release/v0.18.0.md +++ b/gopls/doc/release/v0.18.0.md @@ -1,21 +1,38 @@ # Configuration Changes -- The experimental `hoverKind=Structured` setting is no longer supported. + -- The `gc_details` code lens has been deleted. (It was previously - disabled by default.) This functionality is now available through - the `settings.toggleCompilerOptDetails` code action (documented - below), as code actions are better supported than code lenses across - a range of clients. +- The experimental `Structured` value for the `hoverKind` option is no longer + supported. + +- The `gc_details` code lens has been deleted. (It was previously disabled by + default.) This functionality is now available through the + `toggleCompilerOptDetails` code action (documented below), as code + actions are better supported than code lenses across a range of clients. VS Code's special "Go: Toggle GC details" command continues to work. +- The experimental `semanticTokenTypes` and `semanticTokenModifiers` options + allow selectively disabling certain types of tokens or token modifiers in + `textDocument/semanticTokens` responses. + + These options supersede the `noSemanticString` and `noSemanticTokenNumber` + options, which are now deprecated. Users can instead set + `"semanticTokenTypes": {"string": false, "number": false}` to achieve the + same result. For now, gopls still honors `noSemanticTokenString` and + `noSemanticToken`, but will stop supporting them in a future release. + +- The new `workspaceFiles` option allows configuring glob patterns matching + files that define the logical build of the workspace. This option is only + needed in environments that use a custom golang.org/x/tools/go/packages + driver. + # New features -## "Toggle compiler optimization details" code action +## "{Show,Hide} compiler optimization details" code action This code action, accessible through the "Source Action" menu in VS -Code, toggles a per-package flag that causes Go compiler optimization +Code, toggles a per-directory flag that causes Go compiler optimization details to be reported as diagnostics. For example, it indicates which variables escape to the heap, and which array accesses require bounds checks. @@ -40,6 +57,46 @@ functions and methods are candidates. 
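As a purely illustrative example (the names below are invented, not from this release), this is the kind of declaration the unused-function diagnostic reports: an unexported function with no callers anywhere in its package, which the compiler itself never complains about.

```go
package stats

// Mean is exported, so gopls does not report it; the separate deadcode
// command could still flag it if no program in the module calls it.
func Mean(xs []float64) float64 {
	var sum float64
	for _, x := range xs {
		sum += x
	}
	return sum / float64(len(xs))
}

// median has no callers in this package, so gopls's unused-function
// diagnostic would report it.
func median(xs []float64) float64 {
	if len(xs) == 0 {
		return 0
	}
	return xs[len(xs)/2] // assumes xs is already sorted; illustration only
}
```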
(For a more precise analysis that may report unused exported functions too, use the `golang.org/x/tools/cmd/deadcode` command.) +## New `hostport` analyzer + +With the growing use of IPv6, forming a "host:port" string using +`fmt.Sprintf("%s:%d")` is no longer appropriate because host names may +contain colons. Gopls now reports places where a string constructed in +this fashion (or with `%s` for the port) is passed to `net.Dial` or a +related function, and offers a fix to use `net.JoinHostPort` +instead. + +## Other analyzer changes + +- The `unusedvariable` quickfix is now on by default. +- The `unusedparams` analyzer no longer reports finding for generated files. + +## New `gofix` analyzer + +Gopls now reports when a function call or a use of a constant should be inlined. +These diagnostics and the associated code actions are triggered by "//go:fix inline" +directives at the function and constant definitions. +(See [the go:fix proposal](https://go.dev/issue/32816).) + +For example, consider a package `intmath` with a function `Square(int) int`. +Later the more general `Pow(int, int) int` is introduced, and `Square` is deprecated +in favor of calling `Pow` with a second argument of 2. The author of `intmath` +can write this: +``` +//go:fix inline +func Square(x int) int { return Pow(x, 2) } +``` +If gopls sees a call to `intmath.Square` in your code, it will suggest inlining +it, and will offer a code action to do so. + +The same feature works for constants. +With a constant definition like this: +``` +//go:fix inline +const Ptr = Pointer +``` +gopls will suggest replacing `Ptr` in your code with `Pointer`. + ## "Implementations" supports generics At long last, the "Go to Implementations" feature now fully supports @@ -83,4 +140,29 @@ The Definition query now supports additional locations: ## Improvements to "Hover" When invoked on a return statement, hover reports the types of - the function's result variables. +the function's result variables. + +## UX improvements to format strings + +### "DocumentHighlight" + +When your cursor is inside a printf-like function, gopls now highlights the relationship between +formatting verbs and arguments as visual cues to differentiate how operands are used in the format string. + +```go +fmt.Printf("Hello %s, you scored %d", name, score) +``` + +If the cursor is either on `%s` or `name`, gopls will highlight `%s` as a write operation, +and `name` as a read operation. + +### "SemanticHighlight" + +Similar to the improvements to DocumentHighlight, gopls also reports formatting verbs +as "format" modifier for token type "string" to better distinguish them with other parts of the format string. + +```go +fmt.Printf("Hello %s, you scored %d", name, score) +``` + +`%s` and `%d` will have token type "string" and modifier "format". diff --git a/gopls/doc/semantictokens.md b/gopls/doc/semantictokens.md index f17ea7f06d8..9856d3720a5 100644 --- a/gopls/doc/semantictokens.md +++ b/gopls/doc/semantictokens.md @@ -54,14 +54,15 @@ and change over time. (Nonetheless, a minimal implementation would not return `k `number`, `comment`, or `string`.) The maximal position isn't particularly well-specified either. To chose one example, a -format string might have formatting codes (`%[4]-3.6f`), escape sequences (`\U00010604`), and regular +format string might have formatting codes (`%-[4].6f`), escape sequences (`\U00010604`), and regular characters. Should these all be distinguished? 
One could even imagine distinguishing different runes by their Unicode language assignment, or some other Unicode property, such as -being [confusable](http://www.unicode.org/Public/security/10.0.0/confusables.txt). +being [confusable](http://www.unicode.org/Public/security/10.0.0/confusables.txt). While gopls does not fully adhere to such distinctions, +it does recognizes formatting directives within strings, decorating them with "format" modifiers, +providing more precise semantic highlighting in format strings. -Gopls does not come close to either of these principles. Semantic tokens are returned for -identifiers, keywords, operators, comments, and literals. (Semantic tokens do not -cover the file. They are not returned for +Semantic tokens are returned for identifiers, keywords, operators, comments, and literals. +(Semantic tokens do not cover the file. They are not returned for white space or punctuation, and there is no semantic token for labels.) The following describes more precisely what gopls does, with a few notes on possible alternative choices. diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md index 1350e8f7840..d989b2d19b9 100644 --- a/gopls/doc/settings.md +++ b/gopls/doc/settings.md @@ -143,6 +143,18 @@ This setting is only supported when gopls is built with Go 1.16 or later. Default: `["ignore"]`. + +### `workspaceFiles []string` + +workspaceFiles configures the set of globs that match files defining the +logical build of the current workspace. Any on-disk changes to any files +matching a glob specified here will trigger a reload of the workspace. + +This setting need only be customized in environments with a custom +GOPACKAGESDRIVER. + +Default: `[]`. + ## Formatting @@ -208,6 +220,9 @@ Default: `false`. noSemanticString turns off the sending of the semantic token 'string' +Deprecated: Use SemanticTokenTypes["string"] = false instead. See +golang/vscode-go#3632 + Default: `false`. @@ -215,10 +230,35 @@ Default: `false`. **This setting is experimental and may be deleted.** -noSemanticNumber turns off the sending of the semantic token 'number' +noSemanticNumber turns off the sending of the semantic token 'number' + +Deprecated: Use SemanticTokenTypes["number"] = false instead. See +golang/vscode-go#3632. Default: `false`. + +### `semanticTokenTypes map[string]bool` + +**This setting is experimental and may be deleted.** + +semanticTokenTypes configures the semantic token types. It allows +disabling types by setting each value to false. +By default, all types are enabled. + +Default: `{}`. + + +### `semanticTokenModifiers map[string]bool` + +**This setting is experimental and may be deleted.** + +semanticTokenModifiers configures the semantic token modifiers. It allows +disabling modifiers by setting each value to false. +By default, all modifiers are enabled. + +Default: `{}`. 
+ ## Completion diff --git a/gopls/go.mod b/gopls/go.mod index 173614714cc..83620720ae6 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -7,11 +7,11 @@ go 1.23.4 require ( github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.1 - golang.org/x/mod v0.22.0 - golang.org/x/sync v0.10.0 - golang.org/x/sys v0.29.0 + golang.org/x/mod v0.23.0 + golang.org/x/sync v0.11.0 + golang.org/x/sys v0.30.0 golang.org/x/telemetry v0.0.0-20241220003058-cc96b6e0d3d9 - golang.org/x/text v0.21.0 + golang.org/x/text v0.22.0 golang.org/x/tools v0.28.0 golang.org/x/vuln v1.1.3 gopkg.in/yaml.v3 v3.0.1 diff --git a/gopls/go.sum b/gopls/go.sum index bba08403559..b2b3d925a78 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -16,36 +16,36 @@ github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M= golang.org/x/exp/typeparams v0.0.0-20241210194714-1829a127f884 h1:1xaZTydL5Gsg78QharTwKfA9FY9CZ1VQj6D/AZEvHR0= golang.org/x/exp/typeparams v0.0.0-20241210194714-1829a127f884/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= -golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= +golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= -golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= +golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= -golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= +golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry v0.0.0-20241220003058-cc96b6e0d3d9 h1:L2k9GUV2TpQKVRGMjN94qfUMgUwOFimSQ6gipyJIjKw= golang.org/x/telemetry 
v0.0.0-20241220003058-cc96b6e0d3d9/go.mod h1:8h4Hgq+jcTvCDv2+i7NrfWwpYHcESleo2nGHxLbFLJ4= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/vuln v1.1.3 h1:NPGnvPOTgnjBc9HTaUx+nj+EaUYxl5SJOWqaDYGaFYw= golang.org/x/vuln v1.1.3/go.mod h1:7Le6Fadm5FOqE9C926BCD0g12NWyhg7cxV4BwcPFuNY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/gopls/internal/analysis/deprecated/deprecated.go b/gopls/internal/analysis/deprecated/deprecated.go index 1a8c4c56766..c6df00b4f50 100644 --- a/gopls/internal/analysis/deprecated/deprecated.go +++ b/gopls/internal/analysis/deprecated/deprecated.go @@ -19,6 +19,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/analysisinternal" + internalastutil "golang.org/x/tools/internal/astutil" ) //go:embed doc.go @@ -155,26 +156,8 @@ type deprecatedNames struct { // them both as Facts and the return value. This is a simplified copy // of staticcheck's fact_deprecated analyzer. func collectDeprecatedNames(pass *analysis.Pass, ins *inspector.Inspector) (deprecatedNames, error) { - extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { - for _, doc := range docs { - if doc == nil { - continue - } - parts := strings.Split(doc.Text(), "\n\n") - for _, part := range parts { - if !strings.HasPrefix(part, "Deprecated: ") { - continue - } - alt := part[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return strings.TrimSpace(alt) - } - } - return "" - } - doDocs := func(names []*ast.Ident, docs *ast.CommentGroup) { - alt := extractDeprecatedMessage([]*ast.CommentGroup{docs}) + alt := strings.TrimPrefix(internalastutil.Deprecation(docs), "Deprecated: ") if alt == "" { return } @@ -185,19 +168,21 @@ func collectDeprecatedNames(pass *analysis.Pass, ins *inspector.Inspector) (depr } } - var docs []*ast.CommentGroup - for _, f := range pass.Files { - docs = append(docs, f.Doc) - } - if alt := extractDeprecatedMessage(docs); alt != "" { - // Don't mark package syscall as deprecated, even though - // it is. A lot of people still use it for simple - // constants like SIGKILL, and I am not comfortable - // telling them to use x/sys for that. - if pass.Pkg.Path() != "syscall" { - pass.ExportPackageFact(&deprecationFact{alt}) + // Is package deprecated? + // + // Don't mark package syscall as deprecated, even though + // it is. A lot of people still use it for simple + // constants like SIGKILL, and I am not comfortable + // telling them to use x/sys for that. 
+ if pass.Pkg.Path() != "syscall" { + for _, f := range pass.Files { + if depr := internalastutil.Deprecation(f.Doc); depr != "" { + pass.ExportPackageFact(&deprecationFact{depr}) + break + } } } + nodeFilter := []ast.Node{ (*ast.GenDecl)(nil), (*ast.FuncDecl)(nil), diff --git a/gopls/internal/analysis/fillstruct/fillstruct.go b/gopls/internal/analysis/fillstruct/fillstruct.go index 1181693c3d9..a8a861f0651 100644 --- a/gopls/internal/analysis/fillstruct/fillstruct.go +++ b/gopls/internal/analysis/fillstruct/fillstruct.go @@ -17,6 +17,7 @@ import ( "fmt" "go/ast" "go/format" + "go/printer" "go/token" "go/types" "strings" @@ -168,26 +169,16 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil // Check which types have already been filled in. (we only want to fill in // the unfilled types, or else we'll blat user-supplied details) prefilledFields := map[string]ast.Expr{} + var elts []ast.Expr for _, e := range expr.Elts { if kv, ok := e.(*ast.KeyValueExpr); ok { if key, ok := kv.Key.(*ast.Ident); ok { prefilledFields[key.Name] = kv.Value + elts = append(elts, kv) } } } - // Use a new fileset to build up a token.File for the new composite - // literal. We need one line for foo{, one line for }, and one line for - // each field we're going to set. format.Node only cares about line - // numbers, so we don't need to set columns, and each line can be - // 1 byte long. - // TODO(adonovan): why is this necessary? The position information - // is going to be wrong for the existing trees in prefilledFields. - // Can't the formatter just do its best with an empty fileset? - fakeFset := token.NewFileSet() - tok := fakeFset.AddFile("", -1, fieldCount+2) - - line := 2 // account for 1-based lines and the left brace var fieldTyps []types.Type for i := 0; i < fieldCount; i++ { field := tStruct.Field(i) @@ -200,47 +191,41 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil } matches := analysisinternal.MatchingIdents(fieldTyps, file, start, info, pkg) qual := typesinternal.FileQualifier(file, pkg) - var elts []ast.Expr + for i, fieldTyp := range fieldTyps { if fieldTyp == nil { continue // TODO(adonovan): is this reachable? } fieldName := tStruct.Field(i).Name() - - tok.AddLine(line - 1) // add 1 byte per line - if line > tok.LineCount() { - panic(fmt.Sprintf("invalid line number %v (of %v) for fillstruct", line, tok.LineCount())) + if _, ok := prefilledFields[fieldName]; ok { + // We already stored these when looping over expr.Elt. + // Want to preserve the original order of prefilled fields + continue } - pos := tok.LineStart(line) kv := &ast.KeyValueExpr{ Key: &ast.Ident{ - NamePos: pos, - Name: fieldName, + Name: fieldName, }, - Colon: pos, } - if expr, ok := prefilledFields[fieldName]; ok { + + names, ok := matches[fieldTyp] + if !ok { + return nil, nil, fmt.Errorf("invalid struct field type: %v", fieldTyp) + } + + // Find the name most similar to the field name. + // If no name matches the pattern, generate a zero value. + // NOTE: We currently match on the name of the field key rather than the field type. + if best := fuzzy.BestMatch(fieldName, names); best != "" { + kv.Value = ast.NewIdent(best) + } else if expr, isValid := populateValue(fieldTyp, qual); isValid { kv.Value = expr } else { - names, ok := matches[fieldTyp] - if !ok { - return nil, nil, fmt.Errorf("invalid struct field type: %v", fieldTyp) - } - - // Find the name most similar to the field name. - // If no name matches the pattern, generate a zero value. 
- // NOTE: We currently match on the name of the field key rather than the field type. - if best := fuzzy.BestMatch(fieldName, names); best != "" { - kv.Value = ast.NewIdent(best) - } else if expr, isValid := populateValue(fieldTyp, qual); isValid { - kv.Value = expr - } else { - return nil, nil, nil // no fix to suggest - } + return nil, nil, nil // no fix to suggest } + elts = append(elts, kv) - line++ } // If all of the struct's fields are unexported, we have nothing to do. @@ -248,21 +233,6 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil return nil, nil, fmt.Errorf("no elements to fill") } - // Add the final line for the right brace. Offset is the number of - // bytes already added plus 1. - tok.AddLine(len(elts) + 1) - line = len(elts) + 2 - if line > tok.LineCount() { - panic(fmt.Sprintf("invalid line number %v (of %v) for fillstruct", line, tok.LineCount())) - } - - cl := &ast.CompositeLit{ - Type: expr.Type, - Lbrace: tok.LineStart(1), - Elts: elts, - Rbrace: tok.LineStart(line), - } - // Find the line on which the composite literal is declared. split := bytes.Split(content, []byte("\n")) lineNumber := safetoken.StartPosition(fset, expr.Lbrace).Line @@ -274,26 +244,66 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, fil index := bytes.Index(firstLine, trimmed) whitespace := firstLine[:index] - // First pass through the formatter: turn the expr into a string. - var formatBuf bytes.Buffer - if err := format.Node(&formatBuf, fakeFset, cl); err != nil { - return nil, nil, fmt.Errorf("failed to run first format on:\n%s\ngot err: %v", cl.Type, err) - } - sug := indent(formatBuf.Bytes(), whitespace) + // Write a new composite literal "_{...}" composed of all prefilled and new elements, + // preserving existing formatting and comments. + // An alternative would be to only format the new fields, + // but by printing the entire composite literal, we ensure + // that the result is gofmt'ed. + var buf bytes.Buffer + buf.WriteString("_{\n") + fcmap := ast.NewCommentMap(fset, file, file.Comments) + comments := fcmap.Filter(expr).Comments() // comments inside the expr, in source order + for _, elt := range elts { + // Print comments before the current elt + for len(comments) > 0 && comments[0].Pos() < elt.Pos() { + for _, co := range comments[0].List { + fmt.Fprintln(&buf, co.Text) + } + comments = comments[1:] + } + + // Print the current elt with comments + eltcomments := fcmap.Filter(elt).Comments() + if err := format.Node(&buf, fset, &printer.CommentedNode{Node: elt, Comments: eltcomments}); err != nil { + return nil, nil, err + } + buf.WriteString(",") - if len(prefilledFields) > 0 { - // Attempt a second pass through the formatter to line up columns. - sourced, err := format.Source(sug) - if err == nil { - sug = indent(sourced, whitespace) + // Prune comments up to the end of the elt + for len(comments) > 0 && comments[0].Pos() < elt.End() { + comments = comments[1:] } + + // Write comments associated with the current elt that appear after it + // printer.CommentedNode only prints comments inside the elt. 
+ for _, cg := range eltcomments { + for _, co := range cg.List { + if co.Pos() >= elt.End() { + fmt.Fprintln(&buf, co.Text) + if len(comments) > 0 { + comments = comments[1:] + } + } + } + } + buf.WriteString("\n") + } + buf.WriteString("}") + formatted, err := format.Source(buf.Bytes()) + if err != nil { + return nil, nil, err } + sug := indent(formatted, whitespace) + // Remove _ + idx := bytes.IndexByte(sug, '{') // cannot fail + sug = sug[idx:] + return fset, &analysis.SuggestedFix{ TextEdits: []analysis.TextEdit{ { - Pos: expr.Pos(), - End: expr.End(), + Pos: expr.Lbrace, + End: expr.Rbrace + token.Pos(len("}")), NewText: sug, }, }, diff --git a/internal/refactor/inline/analyzer/directive.go b/gopls/internal/analysis/gofix/directive.go similarity index 89% rename from internal/refactor/inline/analyzer/directive.go rename to gopls/internal/analysis/gofix/directive.go index f4426c5ffa8..796feb5189e 100644 --- a/internal/refactor/inline/analyzer/directive.go +++ b/gopls/internal/analysis/gofix/directive.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package analyzer +package gofix import ( "go/ast" @@ -13,6 +13,8 @@ import ( // -- plundered from the future (CL 605517, issue #68021) -- // TODO(adonovan): replace with ast.Directive after go1.24 (#68021). +// Beware of our local mods to handle analysistest +// "want" comments on the same line. // A directive is a comment line with special meaning to the Go // toolchain or another tool. It has the form: @@ -48,6 +50,9 @@ func directives(g *ast.CommentGroup) (res []*directive) { tool, nameargs = "", tool } name, args, _ := strings.Cut(nameargs, " ") // tab?? + // Permit an additional line comment after the args, chiefly to support + // [golang.org/x/tools/go/analysis/analysistest]. + args, _, _ = strings.Cut(args, "//") res = append(res, &directive{ Pos: c.Slash, Tool: tool, diff --git a/gopls/internal/analysis/gofix/doc.go b/gopls/internal/analysis/gofix/doc.go new file mode 100644 index 00000000000..a0c6a08ded9 --- /dev/null +++ b/gopls/internal/analysis/gofix/doc.go @@ -0,0 +1,81 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gofix defines an Analyzer that inlines calls to functions +and uses of constants +marked with a "//go:fix inline" doc comment. + +# Analyzer gofix + +gofix: apply fixes based on go:fix comment directives + +The gofix analyzer inlines functions and constants that are marked for inlining. + +# Functions + +Given a function that is marked for inlining, like this one: + + //go:fix inline + func Square(x int) int { return Pow(x, 2) } + +this analyzer will recommend that calls to the function elsewhere, in the same +or other packages, should be inlined. + +Inlining can be used to move off of a deprecated function: + + // Deprecated: prefer Pow(x, 2). + //go:fix inline + func Square(x int) int { return Pow(x, 2) } + +It can also be used to move off of an obsolete package, +as when the import path has changed or a higher major version is available: + + package pkg + + import pkg2 "pkg/v2" + + //go:fix inline + func F() { pkg2.F(nil) } + +Replacing a call pkg.F() by pkg2.F(nil) can have no effect on the program, +so this mechanism provides a low-risk way to update large numbers of calls. +We recommend, where possible, expressing the old API in terms of the new one +to enable automatic migration. 
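To make the function case concrete, here is a small self-contained sketch (hypothetical package and names, not taken from this change) of a wrapper marked for inlining and a call site that the analyzer would offer to rewrite:

```go
// Package geo is a hypothetical illustration of a "//go:fix inline" function.
package geo

// Area is a trivial wrapper kept only for compatibility.
//
//go:fix inline
func Area(w, h int) int { return w * h }

// Box calls Area. Given the directive above, the analyzer flags the call
// and offers a fix that rewrites it in place, here and in any importing
// package.
func Box(w, h int) (int, int) {
	area := Area(w, h) // suggested rewrite: area := w * h
	perimeter := 2 * (w + h)
	return area, perimeter
}
```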
+ +# Constants + +Given a constant that is marked for inlining, like this one: + + //go:fix inline + const Ptr = Pointer + +this analyzer will recommend that uses of Ptr should be replaced with Pointer. + +As with functions, inlining can be used to replace deprecated constants and +constants in obsolete packages. + +A constant definition can be marked for inlining only if it refers to another +named constant. + +The "//go:fix inline" comment must appear before a single const declaration on its own, +as above; before a const declaration that is part of a group, as in this case: + + const ( + C = 1 + //go:fix inline + Ptr = Pointer + ) + +or before a group, applying to every constant in the group: + + //go:fix inline + const ( + Ptr = Pointer + Val = Value + ) + +The proposal https://go.dev/issue/32816 introduces the "//go:fix" directives. +*/ +package gofix diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go new file mode 100644 index 00000000000..101924366d6 --- /dev/null +++ b/gopls/internal/analysis/gofix/gofix.go @@ -0,0 +1,341 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gofix + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + _ "embed" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/util/moreiters" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/refactor/inline" + "golang.org/x/tools/internal/typesinternal" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "gofix", + Doc: analysisinternal.MustExtractDoc(doc, "gofix"), + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + Run: run, + FactTypes: []analysis.Fact{new(goFixInlineFuncFact), new(goFixInlineConstFact)}, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (any, error) { + // Memoize repeated calls for same file. + fileContent := make(map[string][]byte) + readFile := func(node ast.Node) ([]byte, error) { + filename := pass.Fset.File(node.Pos()).Name() + content, ok := fileContent[filename] + if !ok { + var err error + content, err = pass.ReadFile(filename) + if err != nil { + return nil, err + } + fileContent[filename] = content + } + return content, nil + } + + // Return the unique ast.File for a cursor. + currentFile := func(c cursor.Cursor) *ast.File { + cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) + return cf.Node().(*ast.File) + } + + // Pass 1: find functions and constants annotated with an appropriate "//go:fix" + // comment (the syntax proposed by #32816), + // and export a fact for each one. 
+ inlinableFuncs := make(map[*types.Func]*inline.Callee) // memoization of fact import (nil => no fact) + inlinableConsts := make(map[*types.Const]*goFixInlineConstFact) + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)} + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch decl := n.(type) { + case *ast.FuncDecl: + if !hasFixInline(decl.Doc) { + return + } + content, err := readFile(decl) + if err != nil { + pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) + return + } + callee, err := inline.AnalyzeCallee(discard, pass.Fset, pass.Pkg, pass.TypesInfo, decl, content) + if err != nil { + pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) + return + } + fn := pass.TypesInfo.Defs[decl.Name].(*types.Func) + pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee}) + inlinableFuncs[fn] = callee + + case *ast.GenDecl: + if decl.Tok != token.CONST { + return + } + declInline := hasFixInline(decl.Doc) + // Accept inline directives on the entire decl as well as individual specs. + for _, spec := range decl.Specs { + spec := spec.(*ast.ValueSpec) // guaranteed by Tok == CONST + specInline := hasFixInline(spec.Doc) + if declInline || specInline { + for i, name := range spec.Names { + if i >= len(spec.Values) { + // Possible following an iota. + break + } + val := spec.Values[i] + var rhsID *ast.Ident + switch e := val.(type) { + case *ast.Ident: + // Constants defined with the predeclared iota cannot be inlined. + if pass.TypesInfo.Uses[e] == builtinIota { + pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") + continue + } + rhsID = e + case *ast.SelectorExpr: + rhsID = e.Sel + default: + pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") + continue + } + lhs := pass.TypesInfo.Defs[name].(*types.Const) + rhs := pass.TypesInfo.Uses[rhsID].(*types.Const) // must be so in a well-typed program + con := &goFixInlineConstFact{ + RHSName: rhs.Name(), + RHSPkgName: rhs.Pkg().Name(), + RHSPkgPath: rhs.Pkg().Path(), + } + if rhs.Pkg() == pass.Pkg { + con.rhsObj = rhs + } + inlinableConsts[lhs] = con + // Create a fact only if the LHS is exported and defined at top level. + // We create a fact even if the RHS is non-exported, + // so we can warn uses in other packages. + if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { + pass.ExportObjectFact(lhs, con) + } + } + } + } + } + }) + + // Pass 2. Inline each static call to an inlinable function + // and each reference to an inlinable constant. + // + // TODO(adonovan): handle multiple diffs that each add the same import. + for cur := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { + n := cur.Node() + switch n := n.(type) { + case *ast.CallExpr: + call := n + if fn := typeutil.StaticCallee(pass.TypesInfo, call); fn != nil { + // Inlinable? + callee, ok := inlinableFuncs[fn] + if !ok { + var fact goFixInlineFuncFact + if pass.ImportObjectFact(fn, &fact) { + callee = fact.Callee + inlinableFuncs[fn] = callee + } + } + if callee == nil { + continue // nope + } + + // Inline the call. 
+ content, err := readFile(call) + if err != nil { + pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) + continue + } + curFile := currentFile(cur) + caller := &inline.Caller{ + Fset: pass.Fset, + Types: pass.Pkg, + Info: pass.TypesInfo, + File: curFile, + Call: call, + Content: content, + } + res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard}) + if err != nil { + pass.Reportf(call.Lparen, "%v", err) + continue + } + if res.Literalized { + // Users are not fond of inlinings that literalize + // f(x) to func() { ... }(), so avoid them. + // + // (Unfortunately the inliner is very timid, + // and often literalizes when it cannot prove that + // reducing the call is safe; the user of this tool + // has no indication of what the problem is.) + continue + } + got := res.Content + + // Suggest the "fix". + var textEdits []analysis.TextEdit + for _, edit := range diff.Bytes(content, got) { + textEdits = append(textEdits, analysis.TextEdit{ + Pos: curFile.FileStart + token.Pos(edit.Start), + End: curFile.FileStart + token.Pos(edit.End), + NewText: []byte(edit.New), + }) + } + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("Call of %v should be inlined", callee), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Inline call of %v", callee), + TextEdits: textEdits, + }}, + }) + } + + case *ast.Ident: + // If the identifier is a use of an inlinable constant, suggest inlining it. + if con, ok := pass.TypesInfo.Uses[n].(*types.Const); ok { + fcon, ok := inlinableConsts[con] + if !ok { + var fact goFixInlineConstFact + if pass.ImportObjectFact(con, &fact) { + fcon = &fact + inlinableConsts[con] = fcon + } + } + if fcon == nil { + continue // nope + } + + // If n is qualified by a package identifier, we'll need the full selector expression. + var sel *ast.SelectorExpr + if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + sel = cur.Parent().Node().(*ast.SelectorExpr) + } + curFile := currentFile(cur) + + // We have an identifier A here (n), possibly qualified by a package identifier (sel.X), + // and an inlinable "const A = B" elsewhere (fcon). + // Consider replacing A with B. + + // Check that the expression we are inlining (B) means the same thing + // (refers to the same object) in n's scope as it does in A's scope. + // If the RHS is not in the current package, AddImport will handle + // shadowing, so we only need to worry about when both expressions + // are in the current package. + if pass.Pkg.Path() == fcon.RHSPkgPath { + // fcon.rhsObj is the object referred to by B in the definition of A. + scope := pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(fcon.RHSName, n.Pos()) // what "B" means in n's scope + if obj == nil { + // Should be impossible: if code at n can refer to the LHS, + // it can refer to the RHS. + panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, fcon.RHSName)) + } + if obj != fcon.rhsObj { + // "B" means something different here than at the inlinable const's scope. + continue + } + } + var ( + importPrefix string + edits []analysis.TextEdit + ) + if fcon.RHSPkgPath != pass.Pkg.Path() { + _, importPrefix, edits = analysisinternal.AddImport( + pass.TypesInfo, curFile, fcon.RHSPkgName, fcon.RHSPkgPath, fcon.RHSName, n.Pos()) + } + var ( + pos = n.Pos() + end = n.End() + name = n.Name + ) + // Replace the entire SelectorExpr if there is one. 
+ if sel != nil { + pos = sel.Pos() + end = sel.End() + name = sel.X.(*ast.Ident).Name + "." + n.Name + } + edits = append(edits, analysis.TextEdit{ + Pos: pos, + End: end, + NewText: []byte(importPrefix + fcon.RHSName), + }) + pass.Report(analysis.Diagnostic{ + Pos: pos, + End: end, + Message: fmt.Sprintf("Constant %s should be inlined", name), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Inline constant %s", name), + TextEdits: edits, + }}, + }) + } + } + } + + return nil, nil +} + +// hasFixInline reports the presence of a "//go:fix inline" directive +// in the comments. +func hasFixInline(cg *ast.CommentGroup) bool { + for _, d := range directives(cg) { + if d.Tool == "go" && d.Name == "fix" && d.Args == "inline" { + return true + } + } + return false +} + +// A goFixInlineFuncFact is exported for each function marked "//go:fix inline". +// It holds information about the callee to support inlining. +type goFixInlineFuncFact struct{ Callee *inline.Callee } + +func (f *goFixInlineFuncFact) String() string { return "goFixInline " + f.Callee.String() } +func (*goFixInlineFuncFact) AFact() {} + +// A goFixInlineConstFact is exported for each constant marked "//go:fix inline". +// It holds information about an inlinable constant. Gob-serializable. +type goFixInlineConstFact struct { + // Information about "const LHSName = RHSName". + RHSName string + RHSPkgPath string + RHSPkgName string + rhsObj types.Object // for current package +} + +func (c *goFixInlineConstFact) String() string { + return fmt.Sprintf("goFixInline const %q.%s", c.RHSPkgPath, c.RHSName) +} + +func (*goFixInlineConstFact) AFact() {} + +func discard(string, ...any) {} + +var builtinIota = types.Universe.Lookup("iota") diff --git a/internal/refactor/inline/analyzer/analyzer_test.go b/gopls/internal/analysis/gofix/gofix_test.go similarity index 72% rename from internal/refactor/inline/analyzer/analyzer_test.go rename to gopls/internal/analysis/gofix/gofix_test.go index 5ad85cfb821..32bd87b6cd2 100644 --- a/internal/refactor/inline/analyzer/analyzer_test.go +++ b/gopls/internal/analysis/gofix/gofix_test.go @@ -2,15 +2,15 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package analyzer_test +package gofix_test import ( "testing" "golang.org/x/tools/go/analysis/analysistest" - inlineanalyzer "golang.org/x/tools/internal/refactor/inline/analyzer" + "golang.org/x/tools/gopls/internal/analysis/gofix" ) func TestAnalyzer(t *testing.T) { - analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), inlineanalyzer.Analyzer, "a", "b") + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), gofix.Analyzer, "a", "b") } diff --git a/internal/refactor/inline/analyzer/main.go b/gopls/internal/analysis/gofix/main.go similarity index 64% rename from internal/refactor/inline/analyzer/main.go rename to gopls/internal/analysis/gofix/main.go index 4be223a80d6..fde633f2f62 100644 --- a/internal/refactor/inline/analyzer/main.go +++ b/gopls/internal/analysis/gofix/main.go @@ -8,12 +8,12 @@ // The inline command applies the inliner to the specified packages of // Go source code. Run with: // -// $ go run ./internal/refactor/inline/analyzer/main.go -fix packages... +// $ go run ./internal/analysis/gofix/main.go -fix packages... 
package main import ( "golang.org/x/tools/go/analysis/singlechecker" - inlineanalyzer "golang.org/x/tools/internal/refactor/inline/analyzer" + "golang.org/x/tools/gopls/internal/analysis/gofix" ) -func main() { singlechecker.Main(inlineanalyzer.Analyzer) } +func main() { singlechecker.Main(gofix.Analyzer) } diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/gopls/internal/analysis/gofix/testdata/src/a/a.go new file mode 100644 index 00000000000..ae486746e5b --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go @@ -0,0 +1,98 @@ +package a + +// Functions. + +func f() { + One() // want `Call of a.One should be inlined` + + new(T).Two() // want `Call of \(a.T\).Two should be inlined` +} + +type T struct{} + +//go:fix inline +func One() int { return one } // want One:`goFixInline a.One` + +const one = 1 + +//go:fix inline +func (T) Two() int { return 2 } // want Two:`goFixInline \(a.T\).Two` + +// Constants. + +const Uno = 1 + +//go:fix inline +const In1 = Uno // want In1: `goFixInline const "a".Uno` + +const ( + no1 = one + + //go:fix inline + In2 = one // want In2: `goFixInline const "a".one` +) + +//go:fix inline +const ( + in3 = one + in4 = one + bad1 = 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` +) + +//go:fix inline +const in5, + in6, + bad2 = one, one, + one + 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +// Make sure we don't crash on iota consts, but still process the whole decl. +// +//go:fix inline +const ( + a = iota // want `invalid //go:fix inline directive: const value is iota` + b + in7 = one +) + +func _() { + x := In1 // want `Constant In1 should be inlined` + x = In2 // want `Constant In2 should be inlined` + x = in3 // want `Constant in3 should be inlined` + x = in4 // want `Constant in4 should be inlined` + x = in5 // want `Constant in5 should be inlined` + x = in6 // want `Constant in6 should be inlined` + x = in7 // want `Constant in7 should be inlined` + x = no1 + _ = x + + in1 := 1 // don't inline lvalues + _ = in1 +} + +const ( + x = 1 + //go:fix inline + in8 = x +) + +func shadow() { + var x int // shadows x at package scope + + //go:fix inline + const a = iota // want `invalid //go:fix inline directive: const value is iota` + + const iota = 2 + // Below this point, iota is an ordinary constant. + + //go:fix inline + const b = iota + + x = a // a is defined with the predeclared iota, so it cannot be inlined + x = b // want `Constant b should be inlined` + + // Don't offer to inline in8, because the result, "x", would mean something different + // in this scope than it does in the scope where in8 is defined. + x = in8 + + _ = x +} diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden new file mode 100644 index 00000000000..7d75a598fb7 --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden @@ -0,0 +1,98 @@ +package a + +// Functions. + +func f() { + _ = one // want `Call of a.One should be inlined` + + _ = 2 // want `Call of \(a.T\).Two should be inlined` +} + +type T struct{} + +//go:fix inline +func One() int { return one } // want One:`goFixInline a.One` + +const one = 1 + +//go:fix inline +func (T) Two() int { return 2 } // want Two:`goFixInline \(a.T\).Two` + +// Constants. 
+ +const Uno = 1 + +//go:fix inline +const In1 = Uno // want In1: `goFixInline const "a".Uno` + +const ( + no1 = one + + //go:fix inline + In2 = one // want In2: `goFixInline const "a".one` +) + +//go:fix inline +const ( + in3 = one + in4 = one + bad1 = 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` +) + +//go:fix inline +const in5, + in6, + bad2 = one, one, + one + 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +// Make sure we don't crash on iota consts, but still process the whole decl. +// +//go:fix inline +const ( + a = iota // want `invalid //go:fix inline directive: const value is iota` + b + in7 = one +) + +func _() { + x := Uno // want `Constant In1 should be inlined` + x = one // want `Constant In2 should be inlined` + x = one // want `Constant in3 should be inlined` + x = one // want `Constant in4 should be inlined` + x = one // want `Constant in5 should be inlined` + x = one // want `Constant in6 should be inlined` + x = one // want `Constant in7 should be inlined` + x = no1 + _ = x + + in1 := 1 // don't inline lvalues + _ = in1 +} + +const ( + x = 1 + //go:fix inline + in8 = x +) + +func shadow() { + var x int // shadows x at package scope + + //go:fix inline + const a = iota // want `invalid //go:fix inline directive: const value is iota` + + const iota = 2 + // Below this point, iota is an ordinary constant. + + //go:fix inline + const b = iota + + x = a // a is defined with the predeclared iota, so it cannot be inlined + x = iota // want `Constant b should be inlined` + + // Don't offer to inline in8, because the result, "x", would mean something different + // in this scope than it does in the scope where in8 is defined. + x = in8 + + _ = x +} diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go b/gopls/internal/analysis/gofix/testdata/src/b/b.go new file mode 100644 index 00000000000..4bf9f0dc650 --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go @@ -0,0 +1,30 @@ +package b + +import "a" +import . "c" + +func f() { + a.One() // want `cannot inline call to a.One because body refers to non-exported one` + + new(a.T).Two() // want `Call of \(a.T\).Two should be inlined` +} + +//go:fix inline +const in2 = a.Uno + +//go:fix inline +const in3 = C // c.C, by dot import + +func g() { + x := a.In1 // want `Constant a\.In1 should be inlined` + + a := 1 + // Although the package identifier "a" is shadowed here, + // a second import of "a" will be added with a new package identifer. + x = in2 // want `Constant in2 should be inlined` + + x = in3 // want `Constant in3 should be inlined` + + _ = a + _ = x +} diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden new file mode 100644 index 00000000000..b26a05c3046 --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden @@ -0,0 +1,34 @@ +package b + +import a0 "a" + +import ( + "a" + . "c" +) + +func f() { + a.One() // want `cannot inline call to a.One because body refers to non-exported one` + + _ = 2 // want `Call of \(a.T\).Two should be inlined` +} + +//go:fix inline +const in2 = a.Uno + +//go:fix inline +const in3 = C // c.C, by dot import + +func g() { + x := a.Uno // want `Constant a\.In1 should be inlined` + + a := 1 + // Although the package identifier "a" is shadowed here, + // a second import of "a" will be added with a new package identifer. 
+ x = a0.Uno // want `Constant in2 should be inlined` + + x = C // want `Constant in3 should be inlined` + + _ = a + _ = x +} diff --git a/gopls/internal/analysis/gofix/testdata/src/c/c.go b/gopls/internal/analysis/gofix/testdata/src/c/c.go new file mode 100644 index 00000000000..36504b886a7 --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/c/c.go @@ -0,0 +1,5 @@ +package c + +// This package is dot-imported by package b. + +const C = 1 diff --git a/gopls/internal/analysis/hostport/hostport.go b/gopls/internal/analysis/hostport/hostport.go new file mode 100644 index 00000000000..a7030ae116f --- /dev/null +++ b/gopls/internal/analysis/hostport/hostport.go @@ -0,0 +1,191 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package hostport defines an analyzer for calls to net.Dial with +// addresses of the form "%s:%d" or "%s:%s", which work only with IPv4. +package hostport + +import ( + "fmt" + "go/ast" + "go/constant" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" +) + +const Doc = `check format of addresses passed to net.Dial + +This analyzer flags code that produces network address strings using +fmt.Sprintf, as in this example: + + addr := fmt.Sprintf("%s:%d", host, 12345) // "will not work with IPv6" + ... + conn, err := net.Dial("tcp", addr) // "when passed to dial here" + +The analyzer suggests a fix to use the correct approach, a call to +net.JoinHostPort: + + addr := net.JoinHostPort(host, "12345") + ... + conn, err := net.Dial("tcp", addr) + +A similar diagnostic and fix are produced for a format string of "%s:%s". +` + +var Analyzer = &analysis.Analyzer{ + Name: "hostport", + Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (any, error) { + // Fast path: if the package doesn't import net and fmt, skip + // the traversal. + if !analysisinternal.Imports(pass.Pkg, "net") || + !analysisinternal.Imports(pass.Pkg, "fmt") { + return nil, nil + } + + info := pass.TypesInfo + + // checkAddr reports a diagnostic if e is a call of the + // form fmt.Sprintf("%s:%d", ...) or fmt.Sprintf("%s:%s", ...). + // The diagnostic includes a fix. + // + // dialCall is non-nil if the Dial call is non-local + // but within the same file. + checkAddr := func(e ast.Expr, dialCall *ast.CallExpr) { + if call, ok := e.(*ast.CallExpr); ok { + obj := typeutil.Callee(info, call) + if analysisinternal.IsFunctionNamed(obj, "fmt", "Sprintf") { + // Examine format string. + formatArg := call.Args[0] + if tv := info.Types[formatArg]; tv.Value != nil { + numericPort := false + format := constant.StringVal(tv.Value) + switch format { + case "%s:%d": + // Have: fmt.Sprintf("%s:%d", host, port) + numericPort = true + + case "%s:%s": + // Have: fmt.Sprintf("%s:%s", host, portStr) + // Keep port string as is. + + default: + return + } + + // Use granular edits to preserve original formatting. + edits := []analysis.TextEdit{ + { + // Replace fmt.Sprintf with net.JoinHostPort.
+ Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte("net.JoinHostPort"), + }, + { + // Delete format string. + Pos: formatArg.Pos(), + End: call.Args[1].Pos(), + }, + } + + // Turn numeric port into a string. + if numericPort { + // port => fmt.Sprintf("%d", port) + // 123 => "123" + port := call.Args[2] + newPort := fmt.Sprintf(`fmt.Sprintf("%%d", %s)`, port) + if port := info.Types[port].Value; port != nil { + if i, ok := constant.Int64Val(port); ok { + newPort = fmt.Sprintf(`"%d"`, i) // numeric constant + } + } + + edits = append(edits, analysis.TextEdit{ + Pos: port.Pos(), + End: port.End(), + NewText: []byte(newPort), + }) + } + + // Refer to Dial call, if not adjacent. + suffix := "" + if dialCall != nil { + suffix = fmt.Sprintf(" (passed to net.Dial at L%d)", + safetoken.StartPosition(pass.Fset, dialCall.Pos()).Line) + } + + pass.Report(analysis.Diagnostic{ + // Highlight the format string. + Pos: formatArg.Pos(), + End: formatArg.End(), + Message: fmt.Sprintf("address format %q does not work with IPv6%s", format, suffix), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace fmt.Sprintf with net.JoinHostPort", + TextEdits: edits, + }}, + }) + } + } + } + } + + // Check address argument of each call to net.Dial et al. + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curCall := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + + obj := typeutil.Callee(info, call) + if analysisinternal.IsFunctionNamed(obj, "net", "Dial", "DialTimeout") || + analysisinternal.IsMethodNamed(obj, "net", "Dialer", "Dial") { + + switch address := call.Args[1].(type) { + case *ast.CallExpr: + // net.Dial("tcp", fmt.Sprintf("%s:%d", ...)) + checkAddr(address, nil) + + case *ast.Ident: + // addr := fmt.Sprintf("%s:%d", ...) + // ... + // net.Dial("tcp", addr) + + // Search for decl of addrVar within common ancestor of addrVar and Dial call. + if addrVar, ok := info.Uses[address].(*types.Var); ok { + pos := addrVar.Pos() + for curAncestor := range curCall.Ancestors() { + if curIdent, ok := curAncestor.FindPos(pos, pos); ok { + // curIdent is the declaring ast.Ident of addr. + switch parent := curIdent.Parent().Node().(type) { + case *ast.AssignStmt: + if len(parent.Rhs) == 1 { + // Have: addr := fmt.Sprintf("%s:%d", ...) + checkAddr(parent.Rhs[0], call) + } + + case *ast.ValueSpec: + if len(parent.Values) == 1 { + // Have: var addr = fmt.Sprintf("%s:%d", ...) + checkAddr(parent.Values[0], call) + } + } + break + } + } + } + } + } + } + return nil, nil +} diff --git a/gopls/internal/analysis/hostport/hostport_test.go b/gopls/internal/analysis/hostport/hostport_test.go new file mode 100644 index 00000000000..4e57a43e8d4 --- /dev/null +++ b/gopls/internal/analysis/hostport/hostport_test.go @@ -0,0 +1,17 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package hostport_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/hostport" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.RunWithSuggestedFixes(t, testdata, hostport.Analyzer, "a") +} diff --git a/gopls/internal/analysis/hostport/main.go b/gopls/internal/analysis/hostport/main.go new file mode 100644 index 00000000000..99f7a09ec39 --- /dev/null +++ b/gopls/internal/analysis/hostport/main.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore + +package main + +import ( + "golang.org/x/tools/go/analysis/singlechecker" + "golang.org/x/tools/gopls/internal/analysis/hostport" +) + +func main() { singlechecker.Main(hostport.Analyzer) } diff --git a/gopls/internal/analysis/hostport/testdata/src/a/a.go b/gopls/internal/analysis/hostport/testdata/src/a/a.go new file mode 100644 index 00000000000..7d80f80f734 --- /dev/null +++ b/gopls/internal/analysis/hostport/testdata/src/a/a.go @@ -0,0 +1,40 @@ +package a + +import ( + "fmt" + "net" +) + +func direct(host string, port int, portStr string) { + // Dial, directly called with result of Sprintf. + net.Dial("tcp", fmt.Sprintf("%s:%d", host, port)) // want `address format "%s:%d" does not work with IPv6` + + net.Dial("tcp", fmt.Sprintf("%s:%s", host, portStr)) // want `address format "%s:%s" does not work with IPv6` +} + +// port is a constant: +var addr4 = fmt.Sprintf("%s:%d", "localhost", 123) // want `address format "%s:%d" does not work with IPv6 \(passed to net.Dial at L39\)` + +func indirect(host string, port int) { + // Dial, addr is immediately preceding. + { + addr1 := fmt.Sprintf("%s:%d", host, port) // want `address format "%s:%d" does not work with IPv6.*at L22` + net.Dial("tcp", addr1) + } + + // DialTimeout, addr is in ancestor block. + addr2 := fmt.Sprintf("%s:%d", host, port) // want `address format "%s:%d" does not work with IPv6.*at L28` + { + net.DialTimeout("tcp", addr2, 0) + } + + // Dialer.Dial, addr is declared with var. + var dialer net.Dialer + { + var addr3 = fmt.Sprintf("%s:%d", host, port) // want `address format "%s:%d" does not work with IPv6.*at L35` + dialer.Dial("tcp", addr3) + } + + // Dialer.Dial again, addr is declared at package level. + dialer.Dial("tcp", addr4) +} diff --git a/gopls/internal/analysis/hostport/testdata/src/a/a.go.golden b/gopls/internal/analysis/hostport/testdata/src/a/a.go.golden new file mode 100644 index 00000000000..b219224e0aa --- /dev/null +++ b/gopls/internal/analysis/hostport/testdata/src/a/a.go.golden @@ -0,0 +1,40 @@ +package a + +import ( + "fmt" + "net" +) + +func direct(host string, port int, portStr string) { + // Dial, directly called with result of Sprintf. + net.Dial("tcp", net.JoinHostPort(host, fmt.Sprintf("%d", port))) // want `address format "%s:%d" does not work with IPv6` + + net.Dial("tcp", net.JoinHostPort(host, portStr)) // want `address format "%s:%s" does not work with IPv6` +} + +// port is a constant: +var addr4 = net.JoinHostPort("localhost", "123") // want `address format "%s:%d" does not work with IPv6 \(passed to net.Dial at L39\)` + +func indirect(host string, port int) { + // Dial, addr is immediately preceding. 
+ { + addr1 := net.JoinHostPort(host, fmt.Sprintf("%d", port)) // want `address format "%s:%d" does not work with IPv6.*at L22` + net.Dial("tcp", addr1) + } + + // DialTimeout, addr is in ancestor block. + addr2 := net.JoinHostPort(host, fmt.Sprintf("%d", port)) // want `address format "%s:%d" does not work with IPv6.*at L28` + { + net.DialTimeout("tcp", addr2, 0) + } + + // Dialer.Dial, addr is declared with var. + var dialer net.Dialer + { + var addr3 = net.JoinHostPort(host, fmt.Sprintf("%d", port)) // want `address format "%s:%d" does not work with IPv6.*at L35` + dialer.Dial("tcp", addr3) + } + + // Dialer.Dial again, addr is declared at package level. + dialer.Dial("tcp", addr4) +} diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index 18be946281e..f851a6688e1 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -14,8 +14,8 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" - "golang.org/x/tools/internal/typesinternal" ) // bloop updates benchmarks that use "for range b.N", replacing it @@ -27,7 +27,7 @@ import ( // for i := 0; i < b.N; i++ {} => for b.Loop() {} // for range b.N {} func bloop(pass *analysis.Pass) { - if !_imports(pass.Pkg, "testing") { + if !analysisinternal.Imports(pass.Pkg, "testing") { return } @@ -36,12 +36,12 @@ func bloop(pass *analysis.Pass) { // edits computes the text edits for a matched for/range loop // at the specified cursor. b is the *testing.B value, and // (start, end) is the portion using b.N to delete. - edits := func(cur cursor.Cursor, b ast.Expr, start, end token.Pos) (edits []analysis.TextEdit) { + edits := func(curLoop cursor.Cursor, b ast.Expr, start, end token.Pos) (edits []analysis.TextEdit) { + curFn, _ := enclosingFunc(curLoop) // Within the same function, delete all calls to // b.{Start,Stop,Timer} that precede the loop. filter := []ast.Node{(*ast.ExprStmt)(nil), (*ast.FuncLit)(nil)} - fn, _ := enclosingFunc(cur) - fn.Inspect(filter, func(cur cursor.Cursor, push bool) (descend bool) { + curFn.Inspect(filter, func(cur cursor.Cursor, push bool) (descend bool) { if push { node := cur.Node() if is[*ast.FuncLit](node) { @@ -52,12 +52,8 @@ func bloop(pass *analysis.Pass) { return false // not preceding: stop } if call, ok := stmt.X.(*ast.CallExpr); ok { - fn := typeutil.StaticCallee(info, call) - if fn != nil && - (isMethod(fn, "testing", "B", "StopTimer") || - isMethod(fn, "testing", "B", "StartTimer") || - isMethod(fn, "testing", "B", "ResetTimer")) { - + obj := typeutil.Callee(info, call) + if analysisinternal.IsMethodNamed(obj, "testing", "B", "StopTimer", "StartTimer", "ResetTimer") { // Delete call statement. // TODO(adonovan): delete following newline, or // up to start of next stmt? (May delete a comment.) 
@@ -75,7 +71,7 @@ func bloop(pass *analysis.Pass) { return append(edits, analysis.TextEdit{ Pos: start, End: end, - NewText: fmt.Appendf(nil, "%s.Loop()", formatNode(pass.Fset, b)), + NewText: fmt.Appendf(nil, "%s.Loop()", analysisinternal.Format(pass.Fset, b)), }) } @@ -93,7 +89,7 @@ func bloop(pass *analysis.Pass) { if cmp, ok := n.Cond.(*ast.BinaryExpr); ok && cmp.Op == token.LSS { if sel, ok := cmp.Y.(*ast.SelectorExpr); ok && sel.Sel.Name == "N" && - isPtrToNamed(info.TypeOf(sel.X), "testing", "B") { + analysisinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") { delStart, delEnd := n.Cond.Pos(), n.Cond.End() @@ -136,7 +132,7 @@ func bloop(pass *analysis.Pass) { n.Key == nil && n.Value == nil && sel.Sel.Name == "N" && - isPtrToNamed(info.TypeOf(sel.X), "testing", "B") { + analysisinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") { pass.Report(analysis.Diagnostic{ // Highlight "range b.N". @@ -155,17 +151,6 @@ func bloop(pass *analysis.Pass) { } } -// isPtrToNamed reports whether t is type "*pkgpath.Name". -func isPtrToNamed(t types.Type, pkgpath, name string) bool { - if ptr, ok := t.(*types.Pointer); ok { - named, ok := ptr.Elem().(*types.Named) - return ok && - named.Obj().Name() == name && - named.Obj().Pkg().Path() == pkgpath - } - return false -} - // uses reports whether the subtree cur contains a use of obj. func uses(info *types.Info, cur cursor.Cursor, obj types.Object) bool { for curId := range cur.Preorder((*ast.Ident)(nil)) { @@ -176,34 +161,11 @@ func uses(info *types.Info, cur cursor.Cursor, obj types.Object) bool { return false } -// isMethod reports whether fn is pkgpath.(T).Name. -func isMethod(fn *types.Func, pkgpath, T, name string) bool { - if recv := fn.Signature().Recv(); recv != nil { - _, recvName := typesinternal.ReceiverNamed(recv) - return recvName != nil && - isPackageLevel(recvName.Obj(), pkgpath, T) && - fn.Name() == name - } - return false -} - // enclosingFunc returns the cursor for the innermost Func{Decl,Lit} -// that encloses (or is) c, if any. -// -// TODO(adonovan): consider adding: -// -// func (Cursor) AnyEnclosing(filter ...ast.Node) (Cursor bool) -// func (Cursor) Enclosing[N ast.Node]() (Cursor, bool) -// -// See comments at [cursor.Cursor.Stack]. +// that encloses c, if any. func enclosingFunc(c cursor.Cursor) (cursor.Cursor, bool) { - for { - switch c.Node().(type) { - case *ast.FuncLit, *ast.FuncDecl: - return c, true - case nil: - return cursor.Cursor{}, false - } - c = c.Parent() + for curAncestor := range c.Ancestors((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { + return curAncestor, true } + return cursor.Cursor{}, false } diff --git a/gopls/internal/analysis/modernize/main.go b/gopls/internal/analysis/modernize/cmd/modernize/main.go similarity index 96% rename from gopls/internal/analysis/modernize/main.go rename to gopls/internal/analysis/modernize/cmd/modernize/main.go index e1276e333ae..1e8a4b95682 100644 --- a/gopls/internal/analysis/modernize/main.go +++ b/gopls/internal/analysis/modernize/cmd/modernize/main.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build ignore - // The modernize command suggests (or, with -fix, applies) fixes that // clarify Go code by using more modern features. 
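For orientation, the following sketch (hypothetical user code, not part of this change) shows the kind of rewrite these analyzers suggest, using the new rangeint check as an example:

```go
package demo

// squares builds the first n squares with a classic 3-clause loop.
// The rangeint modernizer (targeting go1.22 and later) suggests replacing
// the loop header below with: for i := range n.
func squares(n int) []int {
	out := make([]int, 0, n)
	for i := 0; i < n; i++ {
		out = append(out, i*i)
	}
	return out
}
```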
package main diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 379e29b9b0b..15aeab64d8d 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -23,4 +23,13 @@ // from the maps package, added in go1.21; // - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), // added in go1.19; +// - replacing uses of context.WithCancel in tests with t.Context, added in +// go1.24; +// - replacing omitempty by omitzero on structs, added in go1.24; +// - replacing append(s[:i], s[i+1:]...) by slices.Delete(s, i, i+1), +// added in go1.21; +// - replacing a 3-clause for i := 0; i < n; i++ {} loop by +// for i := range n {}, added in go1.22; +// - replacing Split in "for range strings.Split(...)" by go1.24's +// more efficient SplitSeq; package modernize diff --git a/gopls/internal/analysis/modernize/fmtappendf.go b/gopls/internal/analysis/modernize/fmtappendf.go index dd1013e511a..8575827aa3e 100644 --- a/gopls/internal/analysis/modernize/fmtappendf.go +++ b/gopls/internal/analysis/modernize/fmtappendf.go @@ -7,10 +7,13 @@ package modernize import ( "go/ast" "go/types" + "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" ) // The fmtappend function replaces []byte(fmt.Sprintf(...)) by @@ -25,17 +28,20 @@ func fmtappendf(pass *analysis.Pass) { if tv.IsType() && types.Identical(tv.Type, byteSliceType) { call, ok := conv.Args[0].(*ast.CallExpr) if ok { - var appendText = "" - var id *ast.Ident - if id = isQualifiedIdent(info, call.Fun, "fmt", "Sprintf"); id != nil { - appendText = "Appendf" - } else if id = isQualifiedIdent(info, call.Fun, "fmt", "Sprint"); id != nil { - appendText = "Append" - } else if id = isQualifiedIdent(info, call.Fun, "fmt", "Sprintln"); id != nil { - appendText = "Appendln" - } else { + obj := typeutil.Callee(info, call) + if !analysisinternal.IsFunctionNamed(obj, "fmt", "Sprintf", "Sprintln", "Sprint") { continue } + + // Find "Sprint" identifier. + var id *ast.Ident + switch e := ast.Unparen(call.Fun).(type) { + case *ast.SelectorExpr: + id = e.Sel // "fmt.Sprint" + case *ast.Ident: + id = e // "Sprint" after `import . "fmt"` + } + pass.Report(analysis.Diagnostic{ Pos: conv.Pos(), End: conv.End(), @@ -57,7 +63,7 @@ func fmtappendf(pass *analysis.Pass) { { Pos: id.Pos(), End: id.End(), - NewText: []byte(appendText), // replace Sprint with Append + NewText: []byte(strings.Replace(obj.Name(), "Sprint", "Append", 1)), }, { Pos: call.Lparen + 1, diff --git a/gopls/internal/analysis/modernize/maps.go b/gopls/internal/analysis/modernize/maps.go index 071d074533a..c93899621ef 100644 --- a/gopls/internal/analysis/modernize/maps.go +++ b/gopls/internal/analysis/modernize/maps.go @@ -126,31 +126,36 @@ func mapsloop(pass *analysis.Pass) { } } - // Choose function, report diagnostic, and suggest fix. + // Choose function. + var funcName string + if mrhs != nil { + funcName = cond(xmap, "Clone", "Collect") + } else { + funcName = cond(xmap, "Copy", "Insert") + } + + // Report diagnostic, and suggest fix.
rng := curRange.Node() - mapsName, importEdits := analysisinternal.AddImport(info, file, rng.Pos(), "maps", "maps") + _, prefix, importEdits := analysisinternal.AddImport(info, file, "maps", "maps", funcName, rng.Pos()) var ( - funcName string newText []byte start, end token.Pos ) if mrhs != nil { // Replace RHS of preceding m=... assignment (and loop) with expression. start, end = mrhs.Pos(), rng.End() - funcName = cond(xmap, "Clone", "Collect") - newText = fmt.Appendf(nil, "%s.%s(%s)", - mapsName, + newText = fmt.Appendf(nil, "%s%s(%s)", + prefix, funcName, - formatNode(pass.Fset, x)) + analysisinternal.Format(pass.Fset, x)) } else { // Replace loop with call statement. start, end = rng.Pos(), rng.End() - funcName = cond(xmap, "Copy", "Insert") - newText = fmt.Appendf(nil, "%s.%s(%s, %s)", - mapsName, + newText = fmt.Appendf(nil, "%s%s(%s, %s)", + prefix, funcName, - formatNode(pass.Fset, m), - formatNode(pass.Fset, x)) + analysisinternal.Format(pass.Fset, m), + analysisinternal.Format(pass.Fset, x)) } pass.Report(analysis.Diagnostic{ Pos: assign.Lhs[0].Pos(), @@ -177,16 +182,22 @@ func mapsloop(pass *analysis.Pass) { for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { rng := curRange.Node().(*ast.RangeStmt) - if rng.Tok == token.DEFINE && rng.Key != nil && rng.Value != nil && len(rng.Body.List) == 1 { - // Have: for k, v := range x { S } - if assign, ok := rng.Body.List[0].(*ast.AssignStmt); ok && len(assign.Lhs) == 1 { - if index, ok := assign.Lhs[0].(*ast.IndexExpr); ok && - equalSyntax(rng.Key, index.Index) && - equalSyntax(rng.Value, assign.Rhs[0]) { - - // Have: for k, v := range x { m[k] = v } - check(file, curRange, assign, index.X, rng.X) - } + if rng.Tok == token.DEFINE && + rng.Key != nil && + rng.Value != nil && + isAssignBlock(rng.Body) { + // Have: for k, v := range x { lhs = rhs } + + assign := rng.Body.List[0].(*ast.AssignStmt) + if index, ok := assign.Lhs[0].(*ast.IndexExpr); ok && + equalSyntax(rng.Key, index.Index) && + equalSyntax(rng.Value, assign.Rhs[0]) && + is[*types.Map](typeparams.CoreType(info.TypeOf(index.X))) && + types.Identical(info.TypeOf(index), info.TypeOf(rng.Value)) { // m[k], v + + // Have: for k, v := range x { m[k] = v } + // where there is no implicit conversion. + check(file, curRange, assign, index.X, rng.X) } } } @@ -197,8 +208,8 @@ func mapsloop(pass *analysis.Pass) { // iter.Seq[K, V] and returns K and V if so. func assignableToIterSeq2(t types.Type) (k, v types.Type, ok bool) { // The only named type assignable to iter.Seq2 is iter.Seq2. 
- if named, isNamed := t.(*types.Named); isNamed { - if !isPackageLevel(named.Obj(), "iter", "Seq2") { + if is[*types.Named](t) { + if !analysisinternal.IsTypeNamed(t, "iter", "Seq2") { return } t = t.Underlying() diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index 06330657876..26b12341cad 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -13,6 +13,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" ) @@ -85,31 +86,35 @@ func minmax(pass *analysis.Pass) { Pos: ifStmt.Pos(), End: ifStmt.End(), NewText: fmt.Appendf(nil, "%s = %s(%s, %s)", - formatNode(pass.Fset, lhs), + analysisinternal.Format(pass.Fset, lhs), sym, - formatNode(pass.Fset, a), - formatNode(pass.Fset, b)), + analysisinternal.Format(pass.Fset, a), + analysisinternal.Format(pass.Fset, b)), }}, }}, }) } - } else if prev, ok := curIfStmt.PrevSibling(); ok && is[*ast.AssignStmt](prev.Node()) { + } else if prev, ok := curIfStmt.PrevSibling(); ok && isSimpleAssign(prev.Node()) { fassign := prev.Node().(*ast.AssignStmt) - // Have: lhs2 = rhs2; if a < b { lhs = rhs } + // Have: lhs0 = rhs0; if a < b { lhs = rhs } + // // For pattern 2, check that - // - lhs = lhs2 - // - {rhs,rhs2} = {a,b}, but allow lhs2 to - // stand for rhs2. - // TODO(adonovan): accept "var lhs2 = rhs2" form too. - lhs2 := fassign.Lhs[0] - rhs2 := fassign.Rhs[0] - - if equalSyntax(lhs, lhs2) { - if equalSyntax(rhs, a) && (equalSyntax(rhs2, b) || equalSyntax(lhs2, b)) { + // - lhs = lhs0 + // - {a,b} = {rhs,rhs0} or {rhs,lhs0} + // The replacement must use rhs0 not lhs0 though. + // For example, we accept this variant: + // lhs = x; if lhs < y { lhs = y } => lhs = min(x, y), not min(lhs, y) + // + // TODO(adonovan): accept "var lhs0 = rhs0" form too. + lhs0 := fassign.Lhs[0] + rhs0 := fassign.Rhs[0] + + if equalSyntax(lhs, lhs0) { + if equalSyntax(rhs, a) && (equalSyntax(rhs0, b) || equalSyntax(lhs0, b)) { sign = +sign - } else if (equalSyntax(rhs2, a) || equalSyntax(lhs2, a)) && equalSyntax(rhs, b) { + } else if (equalSyntax(rhs0, a) || equalSyntax(lhs0, a)) && equalSyntax(rhs, b) { sign = -sign } else { return @@ -120,6 +125,15 @@ func minmax(pass *analysis.Pass) { return // min/max function is shadowed } + // Permit lhs0 to stand for rhs0 in the matching, + // but don't actually reduce to lhs0 = min(lhs0, rhs) + // since the "=" could be a ":=". Use min(rhs0, rhs). + if equalSyntax(lhs0, a) { + a = rhs0 + } else if equalSyntax(lhs0, b) { + b = rhs0 + } + // pattern 2 pass.Report(analysis.Diagnostic{ // Highlight the condition a < b. 
@@ -130,13 +144,13 @@ func minmax(pass *analysis.Pass) { SuggestedFixes: []analysis.SuggestedFix{{ Message: fmt.Sprintf("Replace if/else with %s", sym), TextEdits: []analysis.TextEdit{{ - // Replace rhs2 and IfStmt with min(a, b) - Pos: rhs2.Pos(), + // Replace rhs0 and IfStmt with min(a, b) + Pos: rhs0.Pos(), End: ifStmt.End(), NewText: fmt.Appendf(nil, "%s(%s, %s)", sym, - formatNode(pass.Fset, a), - formatNode(pass.Fset, b)), + analysisinternal.Format(pass.Fset, a), + analysisinternal.Format(pass.Fset, b)), }}, }}, }) @@ -179,8 +193,17 @@ func isAssignBlock(b *ast.BlockStmt) bool { if len(b.List) != 1 { return false } - assign, ok := b.List[0].(*ast.AssignStmt) - return ok && assign.Tok == token.ASSIGN && len(assign.Lhs) == 1 && len(assign.Rhs) == 1 + // Inv: the sole statement cannot be { lhs := rhs }. + return isSimpleAssign(b.List[0]) +} + +// isSimpleAssign reports whether n has the form "lhs = rhs" or "lhs := rhs". +func isSimpleAssign(n ast.Node) bool { + assign, ok := n.(*ast.AssignStmt) + return ok && + (assign.Tok == token.ASSIGN || assign.Tok == token.DEFINE) && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 } // -- utils -- diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index a117afa994c..0f7b58eed37 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -5,13 +5,13 @@ package modernize import ( - "bytes" _ "embed" "go/ast" "go/format" "go/token" "go/types" "iter" + "regexp" "strings" "golang.org/x/tools/go/analysis" @@ -50,6 +50,9 @@ func run(pass *analysis.Pass) (any, error) { } report := pass.Report pass.Report = func(diag analysis.Diagnostic) { + if diag.Category == "" { + panic("Diagnostic.Category is unset") + } if _, ok := generated[pass.Fset.File(diag.Pos)]; ok { return // skip checking if it's generated code } @@ -63,7 +66,13 @@ func run(pass *analysis.Pass) (any, error) { fmtappendf(pass) mapsloop(pass) minmax(pass) + omitzero(pass) + rangeint(pass) + slicescontains(pass) + slicesdelete(pass) + splitseq(pass) sortslice(pass) + testingContext(pass) // TODO(adonovan): // - more modernizers here; see #70815. @@ -79,29 +88,12 @@ func run(pass *analysis.Pass) (any, error) { // -- helpers -- -// TODO(adonovan): factor with analysisutil.Imports. -func _imports(pkg *types.Package, path string) bool { - for _, imp := range pkg.Imports() { - if imp.Path() == path { - return true - } - } - return false -} - // equalSyntax reports whether x and y are syntactically equal (ignoring comments). func equalSyntax(x, y ast.Expr) bool { sameName := func(x, y *ast.Ident) bool { return x.Name == y.Name } return astutil.Equal(x, y, sameName) } -// formatNode formats n. -func formatNode(fset *token.FileSet, n ast.Node) []byte { - var buf bytes.Buffer - format.Node(&buf, fset, n) // ignore errors - return buf.Bytes() -} - // formatExprs formats a comma-separated list of expressions. func formatExprs(fset *token.FileSet, exprs []ast.Expr) string { var buf strings.Builder @@ -120,15 +112,6 @@ func isZeroLiteral(e ast.Expr) bool { return ok && lit.Kind == token.INT && lit.Value == "0" } -// isPackageLevel reports whether obj is the package-level symbol pkg.Name. 
-func isPackageLevel(obj types.Object, pkgpath, name string) bool { - pkg := obj.Pkg() - return pkg != nil && - obj.Parent() == pkg.Scope() && - obj.Pkg().Path() == pkgpath && - obj.Name() == name -} - // filesUsing returns a cursor for each *ast.File in the inspector // that uses at least the specified version of Go (e.g. "go1.24"). func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[cursor.Cursor] { @@ -143,10 +126,14 @@ func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) } var ( - builtinAny = types.Universe.Lookup("any") - builtinAppend = types.Universe.Lookup("append") - builtinBool = types.Universe.Lookup("bool") - builtinMake = types.Universe.Lookup("make") - builtinNil = types.Universe.Lookup("nil") - byteSliceType = types.NewSlice(types.Typ[types.Byte]) + builtinAny = types.Universe.Lookup("any") + builtinAppend = types.Universe.Lookup("append") + builtinBool = types.Universe.Lookup("bool") + builtinFalse = types.Universe.Lookup("false") + builtinLen = types.Universe.Lookup("len") + builtinMake = types.Universe.Lookup("make") + builtinNil = types.Universe.Lookup("nil") + builtinTrue = types.Universe.Lookup("true") + byteSliceType = types.NewSlice(types.Typ[types.Byte]) + omitemptyRegex = regexp.MustCompile(`(?:^json| json):"[^"]*(,omitempty)(?:"|,[^"]*")\s?`) ) diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index 218c2238762..6662914b28d 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -19,6 +19,12 @@ func Test(t *testing.T) { "fmtappendf", "mapsloop", "minmax", + "omitzero", + "rangeint", + "slicescontains", + "slicesdelete", + "splitseq", "sortslice", + "testingcontext", ) } diff --git a/gopls/internal/analysis/modernize/omitzero.go b/gopls/internal/analysis/modernize/omitzero.go new file mode 100644 index 00000000000..02b7e3fbcd0 --- /dev/null +++ b/gopls/internal/analysis/modernize/omitzero.go @@ -0,0 +1,104 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "go/ast" + "go/types" + "reflect" + "strconv" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil" +) + +func checkOmitEmptyField(pass *analysis.Pass, info *types.Info, curField *ast.Field) { + typ := info.TypeOf(curField.Type) + _, ok := typ.Underlying().(*types.Struct) + if !ok { + // Not a struct + return + } + tag := curField.Tag + if tag == nil { + // No tag to check + return + } + // The omitempty tag may be used by other packages besides json, but we should only modify its use with json + tagconv, _ := strconv.Unquote(tag.Value) + match := omitemptyRegex.FindStringSubmatchIndex(tagconv) + if match == nil { + // No omitempty in json tag + return + } + omitEmptyPos, omitEmptyEnd, err := astutil.RangeInStringLiteral(curField.Tag, match[2], match[3]) + if err != nil { + return + } + removePos, removeEnd := omitEmptyPos, omitEmptyEnd + + jsonTag := reflect.StructTag(tagconv).Get("json") + if jsonTag == ",omitempty" { + // Remove the entire struct tag if json is the only package used + if match[1]-match[0] == len(tagconv) { + removePos = curField.Tag.Pos() + removeEnd = curField.Tag.End() + } else { + // Remove the json tag if omitempty is the only field + removePos, err = astutil.PosInStringLiteral(curField.Tag, match[0]) + if err != nil { + return + } + removeEnd, err = astutil.PosInStringLiteral(curField.Tag, match[1]) + if err != nil { + return + } + } + } + pass.Report(analysis.Diagnostic{ + Pos: curField.Tag.Pos(), + End: curField.Tag.End(), + Category: "omitzero", + Message: "Omitempty has no effect on nested struct fields", + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "Remove redundant omitempty tag", + TextEdits: []analysis.TextEdit{ + { + Pos: removePos, + End: removeEnd, + }, + }, + }, + { + Message: "Replace omitempty with omitzero (behavior change)", + TextEdits: []analysis.TextEdit{ + { + Pos: omitEmptyPos, + End: omitEmptyEnd, + NewText: []byte(",omitzero"), + }, + }, + }, + }}) +} + +// The omitzero pass searches for instances of "omitempty" in a json field tag on a +// struct. Since "omitempty" does not have any effect when applied to a struct field, +// it suggests either deleting "omitempty" or replacing it with "omitzero", which +// correctly excludes structs from a json encoding. +func omitzero(pass *analysis.Pass) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + info := pass.TypesInfo + for curFile := range filesUsing(inspect, info, "go1.24") { + for curStruct := range curFile.Preorder((*ast.StructType)(nil)) { + for _, curField := range curStruct.Node().(*ast.StructType).Fields.List { + checkOmitEmptyField(pass, info, curField) + } + } + } +} diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go new file mode 100644 index 00000000000..c36203cef06 --- /dev/null +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -0,0 +1,163 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" +) + +// rangeint offers a fix to replace a 3-clause 'for' loop: +// +// for i := 0; i < limit; i++ {} +// +// by a range loop with an integer operand: +// +// for i := range limit {} +// +// Variants: +// - The ':=' may be replaced by '='. +// - The fix may remove "i :=" if it would become unused. +// +// Restrictions: +// - The variable i must not be assigned or address-taken within the +// loop, because a "for range int" loop does not respect assignments +// to the loop index. +// - The limit must not be b.N, to avoid redundancy with bloop's fixes. +// +// Caveats: +// - The fix will cause the limit expression to be evaluated exactly +// once, instead of once per iteration. The limit may be a function call +// (e.g. seq.Len()). The fix may change the cardinality of side effects. +func rangeint(pass *analysis.Pass) { + info := pass.TypesInfo + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.22") { + nextLoop: + for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) { + loop := curLoop.Node().(*ast.ForStmt) + if init, ok := loop.Init.(*ast.AssignStmt); ok && + isSimpleAssign(init) && + is[*ast.Ident](init.Lhs[0]) && + isZeroLiteral(init.Rhs[0]) { + // Have: for i = 0; ... (or i := 0) + index := init.Lhs[0].(*ast.Ident) + + if compare, ok := loop.Cond.(*ast.BinaryExpr); ok && + compare.Op == token.LSS && + equalSyntax(compare.X, init.Lhs[0]) { + // Have: for i = 0; i < limit; ... {} + limit := compare.Y + + // Skip loops up to b.N in benchmarks; see [bloop]. + if sel, ok := limit.(*ast.SelectorExpr); ok && + sel.Sel.Name == "N" && + analysisinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") { + continue // skip b.N + } + + if inc, ok := loop.Post.(*ast.IncDecStmt); ok && + inc.Tok == token.INC && + equalSyntax(compare.X, inc.X) { + // Have: for i = 0; i < limit; i++ {} + + // Find references to i within the loop body. + v := info.Defs[index] + used := false + for curId := range curLoop.Child(loop.Body).Preorder((*ast.Ident)(nil)) { + id := curId.Node().(*ast.Ident) + if info.Uses[id] == v { + used = true + + // Reject if any is an l-value (assigned or address-taken): + // a "for range int" loop does not respect assignments to + // the loop variable. + if isScalarLvalue(curId) { + continue nextLoop + } + } + } + + // If i is no longer used, delete "i := ". 
+ var edits []analysis.TextEdit + if !used && init.Tok == token.DEFINE { + edits = append(edits, analysis.TextEdit{ + Pos: index.Pos(), + End: init.Rhs[0].Pos(), + }) + } + + pass.Report(analysis.Diagnostic{ + Pos: init.Pos(), + End: inc.End(), + Category: "rangeint", + Message: "for loop can be modernized using range over int", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace for loop with range %s", + analysisinternal.Format(pass.Fset, limit)), + TextEdits: append(edits, []analysis.TextEdit{ + // for i := 0; i < limit; i++ {} + // ----- --- + // ------- + // for i := range limit {} + { + Pos: init.Rhs[0].Pos(), + End: limit.Pos(), + NewText: []byte("range "), + }, + { + Pos: limit.End(), + End: inc.End(), + }, + }...), + }}, + }) + } + } + } + } + } +} + +// isScalarLvalue reports whether the specified identifier is +// address-taken or appears on the left side of an assignment. +// +// This function is valid only for scalars (x = ...), +// not for aggregates (x.a[i] = ...) +func isScalarLvalue(curId cursor.Cursor) bool { + // Unfortunately we can't simply use info.Types[e].Assignable() + // as it is always true for a variable even when that variable is + // used only as an r-value. So we must inspect enclosing syntax. + + cur := curId + + // Strip enclosing parens. + ek, _ := cur.Edge() + for ek == edge.ParenExpr_X { + cur = cur.Parent() + ek, _ = cur.Edge() + } + + switch ek { + case edge.AssignStmt_Lhs: + return true // i = j + case edge.IncDecStmt_X: + return true // i++, i-- + case edge.UnaryExpr_X: + if cur.Parent().Node().(*ast.UnaryExpr).Op == token.AND { + return true // &i + } + } + return false +} diff --git a/gopls/internal/analysis/modernize/slices.go b/gopls/internal/analysis/modernize/slices.go index 695ade3f652..bdab9dea649 100644 --- a/gopls/internal/analysis/modernize/slices.go +++ b/gopls/internal/analysis/modernize/slices.go @@ -5,6 +5,7 @@ package modernize // This file defines modernizers that use the "slices" package. +// TODO(adonovan): actually let's split them up and rename this file. import ( "fmt" @@ -15,6 +16,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" ) @@ -50,18 +52,21 @@ func appendclipped(pass *analysis.Pass) { // Only appends whose base is a clipped slice can be simplified: // We must conservatively assume an append to an unclipped slice // such as append(y[:0], x...) is intended to have effects on y. - clipped, empty := isClippedSlice(info, base) - if !clipped { + clipped, empty := clippedSlice(info, base) + if clipped == nil { return } // If the (clipped) base is empty, it may be safely ignored. - // Otherwise treat it as just another arg (the first) to Concat. + // Otherwise treat it (or its unclipped subexpression, if possible) + // as just another arg (the first) to Concat. if !empty { - sliceArgs = append(sliceArgs, base) + sliceArgs = append(sliceArgs, clipped) } slices.Reverse(sliceArgs) + // TODO(adonovan): simplify sliceArgs[0] further: slices.Clone(s) -> s + // Concat of a single (non-trivial) slice degenerates to Clone. if len(sliceArgs) == 1 { s := sliceArgs[0] @@ -69,8 +74,8 @@ func appendclipped(pass *analysis.Pass) { // Special case for common but redundant clone of os.Environ(). // append(zerocap, os.Environ()...) 
-> os.Environ() if scall, ok := s.(*ast.CallExpr); ok { - if id := isQualifiedIdent(info, scall.Fun, "os", "Environ"); id != nil { - + obj := typeutil.Callee(info, scall) + if analysisinternal.IsFunctionNamed(obj, "os", "Environ") { pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), @@ -81,7 +86,7 @@ func appendclipped(pass *analysis.Pass) { TextEdits: []analysis.TextEdit{{ Pos: call.Pos(), End: call.End(), - NewText: formatNode(pass.Fset, s), + NewText: []byte(analysisinternal.Format(pass.Fset, s)), }}, }}, }) @@ -90,7 +95,7 @@ func appendclipped(pass *analysis.Pass) { } // append(zerocap, s...) -> slices.Clone(s) - slicesName, importEdits := analysisinternal.AddImport(info, file, call.Pos(), "slices", "slices") + _, prefix, importEdits := analysisinternal.AddImport(info, file, "slices", "slices", "Clone", call.Pos()) pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), @@ -101,7 +106,7 @@ func appendclipped(pass *analysis.Pass) { TextEdits: append(importEdits, []analysis.TextEdit{{ Pos: call.Pos(), End: call.End(), - NewText: []byte(fmt.Sprintf("%s.Clone(%s)", slicesName, formatNode(pass.Fset, s))), + NewText: fmt.Appendf(nil, "%sClone(%s)", prefix, analysisinternal.Format(pass.Fset, s)), }}...), }}, }) @@ -109,12 +114,7 @@ func appendclipped(pass *analysis.Pass) { } // append(append(append(base, a...), b..., c...) -> slices.Concat(base, a, b, c) - // - // TODO(adonovan): simplify sliceArgs[0] further: - // - slices.Clone(s) -> s - // - s[:len(s):len(s)] -> s - // - slices.Clip(s) -> s - slicesName, importEdits := analysisinternal.AddImport(info, file, call.Pos(), "slices", "slices") + _, prefix, importEdits := analysisinternal.AddImport(info, file, "slices", "slices", "Concat", call.Pos()) pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), @@ -125,7 +125,7 @@ func appendclipped(pass *analysis.Pass) { TextEdits: append(importEdits, []analysis.TextEdit{{ Pos: call.Pos(), End: call.End(), - NewText: []byte(fmt.Sprintf("%s.Concat(%s)", slicesName, formatExprs(pass.Fset, sliceArgs))), + NewText: fmt.Appendf(nil, "%sConcat(%s)", prefix, formatExprs(pass.Fset, sliceArgs)), }}...), }}, }) @@ -170,25 +170,36 @@ func appendclipped(pass *analysis.Pass) { } } -// isClippedSlice reports whether e denotes a slice that is definitely -// clipped, that is, its len(s)==cap(s). +// clippedSlice returns res != nil if e denotes a slice that is +// definitely clipped, that is, its len(s)==cap(s). +// +// The value of res is either the same as e or is a subexpression of e +// that denotes the same slice but without the clipping operation. // -// In addition, it reports whether the slice is definitely empty. 
+// In addition, it reports whether the slice is definitely empty, // // Examples of clipped slices: // // x[:0:0] (empty) // []T(nil) (empty) // Slice{} (empty) -// x[:len(x):len(x)] (nonempty) +// x[:len(x):len(x)] (nonempty) res=x // x[:k:k] (nonempty) -// slices.Clip(x) (nonempty) -func isClippedSlice(info *types.Info, e ast.Expr) (clipped, empty bool) { +// slices.Clip(x) (nonempty) res=x +func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) { switch e := e.(type) { case *ast.SliceExpr: - // x[:0:0], x[:len(x):len(x)], x[:k:k], x[:0] - clipped = e.Slice3 && e.High != nil && e.Max != nil && equalSyntax(e.High, e.Max) // x[:k:k] - empty = e.High != nil && isZeroLiteral(e.High) // x[:0:*] + // x[:0:0], x[:len(x):len(x)], x[:k:k] + if e.Slice3 && e.High != nil && e.Max != nil && equalSyntax(e.High, e.Max) { // x[:k:k] + res = e + empty = isZeroLiteral(e.High) // x[:0:0] + if call, ok := e.High.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + equalSyntax(call.Args[0], e.X) { + res = e.X // x[:len(x):len(x)] -> x + } + return + } return case *ast.CallExpr: @@ -196,19 +207,20 @@ func isClippedSlice(info *types.Info, e ast.Expr) (clipped, empty bool) { if info.Types[e.Fun].IsType() && is[*ast.Ident](e.Args[0]) && info.Uses[e.Args[0].(*ast.Ident)] == builtinNil { - return true, true + return e, true } // slices.Clip(x)? - if id := isQualifiedIdent(info, e.Fun, "slices", "Clip"); id != nil { - return true, false + obj := typeutil.Callee(info, e) + if analysisinternal.IsFunctionNamed(obj, "slices", "Clip") { + return e.Args[0], false // slices.Clip(x) -> x } case *ast.CompositeLit: // Slice{}? if len(e.Elts) == 0 { - return true, true + return e, true } } - return false, false + return nil, false } diff --git a/gopls/internal/analysis/modernize/slicescontains.go b/gopls/internal/analysis/modernize/slicescontains.go new file mode 100644 index 00000000000..09642448bb5 --- /dev/null +++ b/gopls/internal/analysis/modernize/slicescontains.go @@ -0,0 +1,390 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typeparams" +) + +// The slicescontains pass identifies loops that can be replaced by a +// call to slices.Contains{,Func}. For example: +// +// for i, elem := range s { +// if elem == needle { +// ... +// break +// } +// } +// +// => +// +// if slices.Contains(s, needle) { ... } +// +// Variants: +// - if the if-condition is f(elem), the replacement +// uses slices.ContainsFunc(s, f). +// - if the if-body is "return true" and the fallthrough +// statement is "return false" (or vice versa), the +// loop becomes "return [!]slices.Contains(...)". +// - if the if-body is "found = true" and the previous +// statement is "found = false" (or vice versa), the +// loop becomes "found = [!]slices.Contains(...)". +// +// It may change cardinality of effects of the "needle" expression. +// (Mostly this appears to be a desirable optimization, avoiding +// redundantly repeated evaluation.) +func slicescontains(pass *analysis.Pass) { + // Don't modify the slices package itself. 
+ if pass.Pkg.Path() == "slices" { + return + } + + info := pass.TypesInfo + + // check is called for each RangeStmt of this form: + // for i, elem := range s { if cond { ... } } + check := func(file *ast.File, curRange cursor.Cursor) { + rng := curRange.Node().(*ast.RangeStmt) + ifStmt := rng.Body.List[0].(*ast.IfStmt) + + // isSliceElem reports whether e denotes the + // current slice element (elem or s[i]). + isSliceElem := func(e ast.Expr) bool { + if rng.Value != nil && equalSyntax(e, rng.Value) { + return true // "elem" + } + if x, ok := e.(*ast.IndexExpr); ok && + equalSyntax(x.X, rng.X) && + equalSyntax(x.Index, rng.Key) { + return true // "s[i]" + } + return false + } + + // Examine the condition for one of these forms: + // + // - if elem or s[i] == needle { ... } => Contains + // - if predicate(s[i] or elem) { ... } => ContainsFunc + var ( + funcName string // "Contains" or "ContainsFunc" + arg2 ast.Expr // second argument to func (needle or predicate) + ) + switch cond := ifStmt.Cond.(type) { + case *ast.BinaryExpr: + if cond.Op == token.EQL { + var elem ast.Expr + if isSliceElem(cond.X) { + funcName = "Contains" + elem = cond.X + arg2 = cond.Y // "if elem == needle" + } else if isSliceElem(cond.Y) { + funcName = "Contains" + elem = cond.Y + arg2 = cond.X // "if needle == elem" + } + + // Reject if elem and needle have different types. + if elem != nil { + tElem := info.TypeOf(elem) + tNeedle := info.TypeOf(arg2) + if !types.Identical(tElem, tNeedle) { + // Avoid ill-typed slices.Contains([]error, any). + if !types.AssignableTo(tNeedle, tElem) { + return + } + // TODO(adonovan): relax this check to allow + // slices.Contains([]error, error(any)), + // inserting an explicit widening conversion + // around the needle. + return + } + } + } + + case *ast.CallExpr: + if len(cond.Args) == 1 && + isSliceElem(cond.Args[0]) && + typeutil.Callee(info, cond) != nil { // not a conversion + + funcName = "ContainsFunc" + arg2 = cond.Fun // "if predicate(elem)" + } + } + if funcName == "" { + return // not a candidate for Contains{,Func} + } + + // body is the "true" body. + body := ifStmt.Body + if len(body.List) == 0 { + // (We could perhaps delete the loop entirely.) + return + } + + // Reject if the body, needle or predicate references either range variable. + usesRangeVar := func(n ast.Node) bool { + cur, ok := curRange.FindNode(n) + if !ok { + panic(fmt.Sprintf("FindNode(%T) failed", n)) + } + return uses(info, cur, info.Defs[rng.Key.(*ast.Ident)]) || + rng.Value != nil && uses(info, cur, info.Defs[rng.Value.(*ast.Ident)]) + } + if usesRangeVar(body) { + // Body uses range var "i" or "elem". + // + // (The check for "i" could be relaxed when we + // generalize this to support slices.Index; + // and the check for "elem" could be relaxed + // if "elem" can safely be replaced in the + // body by "needle".) + return + } + if usesRangeVar(arg2) { + return + } + + // Prepare slices.Contains{,Func} call. + _, prefix, importEdits := analysisinternal.AddImport(info, file, "slices", "slices", funcName, rng.Pos()) + contains := fmt.Sprintf("%s%s(%s, %s)", + prefix, + funcName, + analysisinternal.Format(pass.Fset, rng.X), + analysisinternal.Format(pass.Fset, arg2)) + + report := func(edits []analysis.TextEdit) { + pass.Report(analysis.Diagnostic{ + Pos: rng.Pos(), + End: rng.End(), + Category: "slicescontains", + Message: fmt.Sprintf("Loop can be simplified using slices.%s", funcName), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace loop by call to slices." 
+ funcName, + TextEdits: append(edits, importEdits...), + }}, + }) + } + + // Last statement of body must return/break out of the loop. + // + // TODO(adonovan): opt:consider avoiding FindNode with new API of form: + // curRange.Get(edge.RangeStmt_Body, -1). + // Get(edge.BodyStmt_List, 0). + // Get(edge.IfStmt_Body) + curBody, _ := curRange.FindNode(body) + curLastStmt, _ := curBody.LastChild() + + // Reject if any statement in the body except the + // last has a free continuation (continue or break) + // that might affected by melting down the loop. + // + // TODO(adonovan): relax check by analyzing branch target. + for curBodyStmt := range curBody.Children() { + if curBodyStmt != curLastStmt { + for range curBodyStmt.Preorder((*ast.BranchStmt)(nil), (*ast.ReturnStmt)(nil)) { + return + } + } + } + + switch lastStmt := curLastStmt.Node().(type) { + case *ast.ReturnStmt: + // Have: for ... range seq { if ... { stmts; return x } } + + // Special case: + // body={ return true } next="return false" (or negation) + // => return [!]slices.Contains(...) + if curNext, ok := curRange.NextSibling(); ok { + nextStmt := curNext.Node().(ast.Stmt) + tval := isReturnTrueOrFalse(info, lastStmt) + fval := isReturnTrueOrFalse(info, nextStmt) + if len(body.List) == 1 && tval*fval < 0 { + // for ... { if ... { return true/false } } + // => return [!]slices.Contains(...) + report([]analysis.TextEdit{ + // Delete the range statement and following space. + { + Pos: rng.Pos(), + End: nextStmt.Pos(), + }, + // Change return to [!]slices.Contains(...). + { + Pos: nextStmt.Pos(), + End: nextStmt.End(), + NewText: fmt.Appendf(nil, "return %s%s", + cond(tval > 0, "", "!"), + contains), + }, + }) + return + } + } + + // General case: + // => if slices.Contains(...) { stmts; return x } + report([]analysis.TextEdit{ + // Replace "for ... { if ... " with "if slices.Contains(...)". + { + Pos: rng.Pos(), + End: ifStmt.Body.Pos(), + NewText: fmt.Appendf(nil, "if %s ", contains), + }, + // Delete '}' of range statement and preceding space. + { + Pos: ifStmt.Body.End(), + End: rng.End(), + }, + }) + return + + case *ast.BranchStmt: + if lastStmt.Tok == token.BREAK && lastStmt.Label == nil { // unlabeled break + // Have: for ... { if ... { stmts; break } } + + var prevStmt ast.Stmt // previous statement to range (if any) + if curPrev, ok := curRange.PrevSibling(); ok { + // If the RangeStmt's previous sibling is a Stmt, + // the RangeStmt must be among the Body list of + // a BlockStmt, CauseClause, or CommClause. + // In all cases, the prevStmt is the immediate + // predecessor of the RangeStmt during execution. + // + // (This is not true for Stmts in general; + // see [Cursor.Children] and #71074.) + prevStmt, _ = curPrev.Node().(ast.Stmt) + } + + // Special case: + // prev="lhs = false" body={ lhs = true; break } + // => lhs = slices.Contains(...) (or negation) + if assign, ok := body.List[0].(*ast.AssignStmt); ok && + len(body.List) == 2 && + assign.Tok == token.ASSIGN && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 { + + // Have: body={ lhs = rhs; break } + + if prevAssign, ok := prevStmt.(*ast.AssignStmt); ok && + len(prevAssign.Lhs) == 1 && + len(prevAssign.Rhs) == 1 && + equalSyntax(prevAssign.Lhs[0], assign.Lhs[0]) && + is[*ast.Ident](assign.Rhs[0]) && + info.Uses[assign.Rhs[0].(*ast.Ident)] == builtinTrue { + + // Have: + // lhs = false + // for ... { if ... { lhs = true; break } } + // => + // lhs = slices.Contains(...) + // + // TODO(adonovan): + // - support "var lhs bool = false" and variants. 
+ // - support negation. + // Both these variants seem quite significant. + // - allow the break to be omitted. + report([]analysis.TextEdit{ + // Replace "rhs" of previous assignment by slices.Contains(...) + { + Pos: prevAssign.Rhs[0].Pos(), + End: prevAssign.Rhs[0].End(), + NewText: []byte(contains), + }, + // Delete the loop and preceding space. + { + Pos: prevAssign.Rhs[0].End(), + End: rng.End(), + }, + }) + return + } + } + + // General case: + // for ... { if ... { stmts; break } } + // => if slices.Contains(...) { stmts } + report([]analysis.TextEdit{ + // Replace "for ... { if ... " with "if slices.Contains(...)". + { + Pos: rng.Pos(), + End: ifStmt.Body.Pos(), + NewText: fmt.Appendf(nil, "if %s ", contains), + }, + // Delete break statement and preceding space. + { + Pos: func() token.Pos { + if len(body.List) > 1 { + beforeBreak, _ := curLastStmt.PrevSibling() + return beforeBreak.Node().End() + } + return lastStmt.Pos() + }(), + End: lastStmt.End(), + }, + // Delete '}' of range statement and preceding space. + { + Pos: ifStmt.Body.End(), + End: rng.End(), + }, + }) + return + } + } + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.21") { + file := curFile.Node().(*ast.File) + + for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { + rng := curRange.Node().(*ast.RangeStmt) + + if is[*ast.Ident](rng.Key) && + rng.Tok == token.DEFINE && + len(rng.Body.List) == 1 && + is[*types.Slice](typeparams.CoreType(info.TypeOf(rng.X))) { + + // Have: + // - for _, elem := range s { S } + // - for i := range s { S } + + if ifStmt, ok := rng.Body.List[0].(*ast.IfStmt); ok && + ifStmt.Init == nil && ifStmt.Else == nil { + + // Have: for i, elem := range s { if cond { ... } } + check(file, curRange) + } + } + } + } +} + +// -- helpers -- + +// isReturnTrueOrFalse returns nonzero if stmt returns true (+1) or false (-1). +func isReturnTrueOrFalse(info *types.Info, stmt ast.Stmt) int { + if ret, ok := stmt.(*ast.ReturnStmt); ok && len(ret.Results) == 1 { + if id, ok := ret.Results[0].(*ast.Ident); ok { + switch info.Uses[id] { + case builtinTrue: + return +1 + case builtinFalse: + return -1 + } + } + } + return 0 +} diff --git a/gopls/internal/analysis/modernize/slicesdelete.go b/gopls/internal/analysis/modernize/slicesdelete.go new file mode 100644 index 00000000000..24b2182ca6a --- /dev/null +++ b/gopls/internal/analysis/modernize/slicesdelete.go @@ -0,0 +1,128 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/constant" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" +) + +// The slicesdelete pass attempts to replace instances of append(s[:i], s[i+k:]...) +// with slices.Delete(s, i, i+k) where k is some positive constant. +// Other variations that will also have suggested replacements include: +// append(s[:i-1], s[i:]...) and append(s[:i+k1], s[i+k2:]) where k2 > k1. 
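A minimal sketch of the rewrite described in the doc comment above (illustrative only, under the stated requirement that the first index is provably less than the second; not code from this change):

import "slices"

// removeAt drops the element at index i from s.
func removeAt(s []int, i int) []int {
	// Before the fix: return append(s[:i], s[i+1:]...)
	return slices.Delete(s, i, i+1) // after the fix (Go 1.21+)
}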
+func slicesdelete(pass *analysis.Pass) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + info := pass.TypesInfo + report := func(file *ast.File, call *ast.CallExpr, slice1, slice2 *ast.SliceExpr) { + _, prefix, edits := analysisinternal.AddImport(info, file, "slices", "slices", "Delete", call.Pos()) + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Category: "slicesdelete", + Message: "Replace append with slices.Delete", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace append with slices.Delete", + TextEdits: append(edits, []analysis.TextEdit{ + // Change name of called function. + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte(prefix + "Delete"), + }, + // Delete ellipsis. + { + Pos: call.Ellipsis, + End: call.Ellipsis + token.Pos(len("...")), // delete ellipsis + }, + // Remove second slice variable name. + { + Pos: slice2.X.Pos(), + End: slice2.X.End(), + }, + // Insert after first slice variable name. + { + Pos: slice1.X.End(), + NewText: []byte(", "), + }, + // Remove brackets and colons. + { + Pos: slice1.Lbrack, + End: slice1.High.Pos(), + }, + { + Pos: slice1.Rbrack, + End: slice1.Rbrack + 1, + }, + { + Pos: slice2.Lbrack, + End: slice2.Lbrack + 1, + }, + { + Pos: slice2.Low.End(), + End: slice2.Rbrack + 1, + }, + }...), + }}, + }) + } + for curFile := range filesUsing(inspect, info, "go1.21") { + file := curFile.Node().(*ast.File) + for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + if id, ok := call.Fun.(*ast.Ident); ok && len(call.Args) == 2 { + // Verify we have append with two slices and ... operator, + // the first slice has no low index and second slice has no + // high index, and not a three-index slice. + if call.Ellipsis.IsValid() && info.Uses[id] == builtinAppend { + slice1, ok1 := call.Args[0].(*ast.SliceExpr) + slice2, ok2 := call.Args[1].(*ast.SliceExpr) + if ok1 && slice1.Low == nil && !slice1.Slice3 && + ok2 && slice2.High == nil && !slice2.Slice3 && + equalSyntax(slice1.X, slice2.X) && + increasingSliceIndices(info, slice1.High, slice2.Low) { + // Have append(s[:a], s[b:]...) where we can verify a < b. + report(file, call, slice1, slice2) + } + } + } + } + } +} + +// Given two slice indices a and b, returns true if we can verify that a < b. +// It recognizes certain forms such as i+k1 < i+k2 where k1 < k2. +func increasingSliceIndices(info *types.Info, a, b ast.Expr) bool { + + // Given an expression of the form i±k, returns (i, k) + // where k is a signed constant. Otherwise it returns (e, 0). 
+ split := func(e ast.Expr) (ast.Expr, constant.Value) { + if binary, ok := e.(*ast.BinaryExpr); ok && (binary.Op == token.SUB || binary.Op == token.ADD) { + // Negate constants if operation is subtract instead of add + if k := info.Types[binary.Y].Value; k != nil { + return binary.X, constant.UnaryOp(binary.Op, k, 0) // i ± k + } + } + return e, constant.MakeInt64(0) + } + + // Handle case where either a or b is a constant + ak := info.Types[a].Value + bk := info.Types[b].Value + if ak != nil || bk != nil { + return ak != nil && bk != nil && constant.Compare(ak, token.LSS, bk) + } + + ai, ak := split(a) + bi, bk := split(b) + return equalSyntax(ai, bi) && constant.Compare(ak, token.LSS, bk) +} diff --git a/gopls/internal/analysis/modernize/sortslice.go b/gopls/internal/analysis/modernize/sortslice.go index 98e501875d2..7f695d76495 100644 --- a/gopls/internal/analysis/modernize/sortslice.go +++ b/gopls/internal/analysis/modernize/sortslice.go @@ -13,6 +13,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" ) @@ -23,33 +24,31 @@ import ( // sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) // => slices.Sort(s) // -// It also supports the SliceStable variant. +// There is no slices.SortStable. // // TODO(adonovan): support // // - sort.Slice(s, func(i, j int) bool { return s[i] ... s[j] }) -// -> slices.SortFunc(s, func(x, y int) bool { return x ... y }) -// iff all uses of i, j can be replaced by s[i], s[j]. +// -> slices.SortFunc(s, func(x, y T) int { return x ... y }) +// iff all uses of i, j can be replaced by s[i], s[j] and "<" can be replaced with cmp.Compare. +// +// - As above for sort.SliceStable -> slices.SortStableFunc. // // - sort.Sort(x) where x has a named slice type whose Less method is the natural order. // -> sort.Slice(x) func sortslice(pass *analysis.Pass) { - if !_imports(pass.Pkg, "sort") { + if !analysisinternal.Imports(pass.Pkg, "sort") { return } info := pass.TypesInfo check := func(file *ast.File, call *ast.CallExpr) { - // call to sort.Slice{,Stable}? - var stable string - if isQualifiedIdent(info, call.Fun, "sort", "Slice") != nil { - } else if isQualifiedIdent(info, call.Fun, "sort", "SliceStable") != nil { - stable = "Stable" - } else { + // call to sort.Slice? + obj := typeutil.Callee(info, call) + if !analysisinternal.IsFunctionNamed(obj, "sort", "Slice") { return } - if lit, ok := call.Args[1].(*ast.FuncLit); ok && len(lit.Body.List) == 1 { sig := info.Types[lit.Type].Type.(*types.Signature) @@ -71,22 +70,23 @@ func sortslice(pass *analysis.Pass) { if isIndex(compare.X, i) && isIndex(compare.Y, j) { // Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) - slicesName, importEdits := analysisinternal.AddImport(info, file, call.Pos(), "slices", "slices") + _, prefix, importEdits := analysisinternal.AddImport( + info, file, "slices", "slices", "Sort", call.Pos()) pass.Report(analysis.Diagnostic{ // Highlight "sort.Slice". 
Pos: call.Fun.Pos(), End: call.Fun.End(), Category: "sortslice", - Message: fmt.Sprintf("sort.Slice%[1]s can be modernized using slices.Sort%[1]s", stable), + Message: fmt.Sprintf("sort.Slice can be modernized using slices.Sort"), SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Replace sort.Slice%[1]s call by slices.Sort%[1]s", stable), + Message: fmt.Sprintf("Replace sort.Slice call by slices.Sort"), TextEdits: append(importEdits, []analysis.TextEdit{ { // Replace sort.Slice with slices.Sort. Pos: call.Fun.Pos(), End: call.Fun.End(), - NewText: []byte(slicesName + ".Sort" + stable), + NewText: []byte(prefix + "Sort"), }, { // Eliminate FuncLit. @@ -111,21 +111,3 @@ func sortslice(pass *analysis.Pass) { } } } - -// isQualifiedIdent reports whether e is a reference to pkg.Name. If so, it returns the identifier. -func isQualifiedIdent(info *types.Info, e ast.Expr, pkgpath, name string) *ast.Ident { - var id *ast.Ident - switch e := e.(type) { - case *ast.Ident: - id = e // e.g. dot import - case *ast.SelectorExpr: - id = e.Sel - default: - return nil - } - obj, ok := info.Uses[id] - if ok && isPackageLevel(obj, pkgpath, name) { - return id - } - return nil -} diff --git a/gopls/internal/analysis/modernize/splitseq.go b/gopls/internal/analysis/modernize/splitseq.go new file mode 100644 index 00000000000..1f3da859e9b --- /dev/null +++ b/gopls/internal/analysis/modernize/splitseq.go @@ -0,0 +1,112 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/edge" +) + +// splitseq offers a fix to replace a call to strings.Split with +// SplitSeq when it is the operand of a range loop, either directly: +// +// for _, line := range strings.Split() {...} +// +// or indirectly, if the variable's sole use is the range statement: +// +// lines := strings.Split() +// for _, line := range lines {...} +// +// Variants: +// - bytes.SplitSeq +func splitseq(pass *analysis.Pass) { + if !analysisinternal.Imports(pass.Pkg, "strings") && + !analysisinternal.Imports(pass.Pkg, "bytes") { + return + } + info := pass.TypesInfo + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.24") { + for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { + rng := curRange.Node().(*ast.RangeStmt) + + // Reject "for i, line := ..." since SplitSeq is not an iter.Seq2. + // (We require that i is blank.) + if id, ok := rng.Key.(*ast.Ident); ok && id.Name != "_" { + continue + } + + // Find the call operand of the range statement, + // whether direct or indirect. + call, ok := rng.X.(*ast.CallExpr) + if !ok { + if id, ok := rng.X.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.Var); ok { + if ek, idx := curRange.Edge(); ek == edge.BlockStmt_List && idx > 0 { + curPrev, _ := curRange.PrevSibling() + if assign, ok := curPrev.Node().(*ast.AssignStmt); ok && + assign.Tok == token.DEFINE && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 && + info.Defs[assign.Lhs[0].(*ast.Ident)] == v && + soleUse(info, v) == id { + // Have: + // lines := ... + // for _, line := range lines {...} + // and no other uses of lines. 
+ call, _ = assign.Rhs[0].(*ast.CallExpr) + } + } + } + } + } + + if call != nil { + var edits []analysis.TextEdit + if rng.Key != nil { + // Delete (blank) RangeStmt.Key: + // for _, line := -> for line := + // for _, _ := -> for + // for _ := -> for + end := rng.Range + if rng.Value != nil { + end = rng.Value.Pos() + } + edits = append(edits, analysis.TextEdit{ + Pos: rng.Key.Pos(), + End: end, + }) + } + + if sel, ok := call.Fun.(*ast.SelectorExpr); ok && + (analysisinternal.IsFunctionNamed(typeutil.Callee(info, call), "strings", "Split") || + analysisinternal.IsFunctionNamed(typeutil.Callee(info, call), "bytes", "Split")) { + pass.Report(analysis.Diagnostic{ + Pos: sel.Pos(), + End: sel.End(), + Category: "splitseq", + Message: "Ranging over SplitSeq is more efficient", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace Split with SplitSeq", + TextEdits: append(edits, analysis.TextEdit{ + // Split -> SplitSeq + Pos: sel.Sel.Pos(), + End: sel.Sel.End(), + NewText: []byte("SplitSeq")}), + }}, + }) + } + } + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/appendclipped/appendclipped.go.golden b/gopls/internal/analysis/modernize/testdata/src/appendclipped/appendclipped.go.golden index 5d6761b5371..6352d525b34 100644 --- a/gopls/internal/analysis/modernize/testdata/src/appendclipped/appendclipped.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/appendclipped/appendclipped.go.golden @@ -20,7 +20,7 @@ func _(s, other []string) { print(slices.Concat(Bytes{1, 2, 3}, Bytes{4, 5, 6})) // want "Replace append with slices.Concat" print(slices.Concat(s, other, other)) // want "Replace append with slices.Concat" print(slices.Concat(os.Environ(), other, other)) // want "Replace append with slices.Concat" - print(slices.Concat(other[:len(other):len(other)], s, other)) // want "Replace append with slices.Concat" - print(slices.Concat(slices.Clip(other), s, other)) // want "Replace append with slices.Concat" + print(slices.Concat(other, s, other)) // want "Replace append with slices.Concat" + print(slices.Concat(other, s, other)) // want "Replace append with slices.Concat" print(append(append(append(other[:0], s...), other...), other...)) // nope: intent may be to mutate other } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go index ab1305d3b81..769b4c84f60 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go @@ -20,6 +20,13 @@ func useCopy(dst, src map[int]string) { } } +func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { + // Replace loop by maps.Copy. + for key, value := range src { + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" + } +} + func useClone(src map[int]string) { // Replace make(...) by maps.Clone. dst := make(map[int]string, len(src)) @@ -138,3 +145,32 @@ func nopeBodyNotASingleton(src map[int]string) { println() // nope: other things in the loop body } } + +// Regression test for https://github.com/golang/go/issues/70815#issuecomment-2581999787. 
+func nopeAssignmentHasIncrementOperator(src map[int]int) { + dst := make(map[int]int) + for k, v := range src { + dst[k] += v + } +} + +func nopeNotAMap(src map[int]string) { + var dst []string + for k, v := range src { + dst[k] = v + } +} + +func nopeNotAMapGeneric[E any, M ~map[int]E, S ~[]E](src M) { + var dst S + for k, v := range src { + dst[k] = v + } +} + +func nopeHasImplicitWidening(src map[string]int) { + dst := make(map[string]any) + for k, v := range src { + dst[k] = v + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden index 6d95cc023ee..b9aa39021e8 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden @@ -18,6 +18,11 @@ func useCopy(dst, src map[int]string) { maps.Copy(dst, src) } +func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { + // Replace loop by maps.Copy. + maps.Copy(dst, src) +} + func useClone(src map[int]string) { // Replace make(...) by maps.Clone. dst := maps.Clone(src) @@ -110,3 +115,32 @@ func nopeBodyNotASingleton(src map[int]string) { println() // nope: other things in the loop body } } + +// Regression test for https://github.com/golang/go/issues/70815#issuecomment-2581999787. +func nopeAssignmentHasIncrementOperator(src map[int]int) { + dst := make(map[int]int) + for k, v := range src { + dst[k] += v + } +} + +func nopeNotAMap(src map[int]string) { + var dst []string + for k, v := range src { + dst[k] = v + } +} + +func nopeNotAMapGeneric[E any, M ~map[int]E, S ~[]E](src M) { + var dst S + for k, v := range src { + dst[k] = v + } +} + +func nopeHasImplicitWidening(src map[string]int) { + dst := make(map[string]any) + for k, v := range src { + dst[k] = v + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go new file mode 100644 index 00000000000..c33d43e23ad --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go @@ -0,0 +1,23 @@ +//go:build go1.23 + +package mapsloop + +import . "maps" + +var _ = Clone[M] // force "maps" import so that each diagnostic doesn't add one + +func useCopyDot(dst, src map[int]string) { + // Replace loop by maps.Copy. + for key, value := range src { + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" + } +} + +func useCloneDot(src map[int]string) { + // Replace make(...) by maps.Clone. + dst := make(map[int]string, len(src)) + for key, value := range src { + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + } + println(dst) +} diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden new file mode 100644 index 00000000000..d6a30537645 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden @@ -0,0 +1,19 @@ +//go:build go1.23 + +package mapsloop + +import . "maps" + +var _ = Clone[M] // force "maps" import so that each diagnostic doesn't add one + +func useCopyDot(dst, src map[int]string) { + // Replace loop by maps.Copy. + Copy(dst, src) +} + +func useCloneDot(src map[int]string) { + // Replace make(...) by maps.Clone. 
+ dst := Clone(src) + println(dst) +} + diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index 393b3729e07..c73bd30139b 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -71,3 +71,24 @@ func nopeIfStmtHasInitStmt() { } print(x) } + +// Regression test for a bug: fix was "y := max(x, y)". +func oops() { + x := 1 + y := 2 + if x > y { // want "if statement can be modernized using max" + y = x + } + print(y) +} + +// Regression test for a bug: += is not a simple assignment. +func nopeAssignHasIncrementOperator() { + x := 1 + y := 0 + y += 2 + if x > y { + y = x + } + print(y) +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index aacf84dd1c4..11eac2c1418 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -11,12 +11,12 @@ func ifmax(a, b int) { } func ifminvariant(a, b int) { - x := min(x, b) + x := min(a, b) print(x) } func ifmaxvariant(a, b int) { - x := min(a, x) + x := min(a, b) print(x) } @@ -51,3 +51,21 @@ func nopeIfStmtHasInitStmt() { } print(x) } + +// Regression test for a bug: fix was "y := max(x, y)". +func oops() { + x := 1 + y := max(x, 2) + print(y) +} + +// Regression test for a bug: += is not a simple assignment. +func nopeAssignHasIncrementOperator() { + x := 1 + y := 0 + y += 2 + if x > y { + y = x + } + print(y) +} diff --git a/gopls/internal/analysis/modernize/testdata/src/omitzero/omitzero.go b/gopls/internal/analysis/modernize/testdata/src/omitzero/omitzero.go new file mode 100644 index 00000000000..f6c50cc93bb --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/omitzero/omitzero.go @@ -0,0 +1,30 @@ +package omitzero + +type Foo struct { + EmptyStruct struct{} `json:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} + +type Bar struct { + NonEmptyStruct struct{ a int } `json:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} + +type C struct { + D string `json:",omitempty"` +} + +type R struct { + M string `json:",omitempty"` +} + +type A struct { + C C `json:"test,omitempty"` // want "Omitempty has no effect on nested struct fields" + R R `json:"test"` +} + +type X struct { + NonEmptyStruct struct{ a int } `json:",omitempty" yaml:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} + +type Y struct { + NonEmptyStruct struct{ a int } `yaml:",omitempty" json:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} diff --git a/gopls/internal/analysis/modernize/testdata/src/omitzero/omitzero.go.golden b/gopls/internal/analysis/modernize/testdata/src/omitzero/omitzero.go.golden new file mode 100644 index 00000000000..daf0ea8235b --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/omitzero/omitzero.go.golden @@ -0,0 +1,63 @@ +-- Replace omitempty with omitzero (behavior change) -- +package omitzero + +type Foo struct { + EmptyStruct struct{} `json:",omitzero"` // want "Omitempty has no effect on nested struct fields" +} + +type Bar struct { + NonEmptyStruct struct{ a int } `json:",omitzero"` // want "Omitempty has no effect on nested struct fields" +} + +type C struct { + D string `json:",omitempty"` +} + +type R struct { + M string 
`json:",omitempty"` +} + +type A struct { + C C `json:"test,omitzero"` // want "Omitempty has no effect on nested struct fields" + R R `json:"test"` +} + +type X struct { + NonEmptyStruct struct{ a int } `json:",omitzero" yaml:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} + +type Y struct { + NonEmptyStruct struct{ a int } `yaml:",omitempty" json:",omitzero"` // want "Omitempty has no effect on nested struct fields" +} + +-- Remove redundant omitempty tag -- +package omitzero + +type Foo struct { + EmptyStruct struct{} // want "Omitempty has no effect on nested struct fields" +} + +type Bar struct { + NonEmptyStruct struct{ a int } // want "Omitempty has no effect on nested struct fields" +} + +type C struct { + D string `json:",omitempty"` +} + +type R struct { + M string `json:",omitempty"` +} + +type A struct { + C C `json:"test"` // want "Omitempty has no effect on nested struct fields" + R R `json:"test"` +} + +type X struct { + NonEmptyStruct struct{ a int } `yaml:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} + +type Y struct { + NonEmptyStruct struct{ a int } `yaml:",omitempty"` // want "Omitempty has no effect on nested struct fields" +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go new file mode 100644 index 00000000000..e17dccac9d0 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -0,0 +1,37 @@ +package rangeint + +func _(i int, s struct{ i int }) { + for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" + println(i) + } + for i = 0; i < f(); i++ { // want "for loop can be modernized using range over int" + } + for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" + // i unused within loop + } + + // nope + for i := 0; i < 10; { // nope: missing increment + } + for i := 0; i < 10; i-- { // nope: negative increment + } + for i := 0; ; i++ { // nope: missing comparison + } + for i := 0; i <= 10; i++ { // nope: wrong comparison + } + for ; i < 10; i++ { // nope: missing init + } + for s.i = 0; s.i < 10; s.i++ { // nope: not an ident + } + for i := 0; i < 10; i++ { // nope: takes address of i + println(&i) + } + for i := 0; i < 10; i++ { // nope: increments i + i++ + } + for i := 0; i < 10; i++ { // nope: assigns i + i = 8 + } +} + +func f() int { return 0 } diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden new file mode 100644 index 00000000000..5a76229c858 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -0,0 +1,37 @@ +package rangeint + +func _(i int, s struct{ i int }) { + for i := range 10 { // want "for loop can be modernized using range over int" + println(i) + } + for i = range f() { // want "for loop can be modernized using range over int" + } + for range 10 { // want "for loop can be modernized using range over int" + // i unused within loop + } + + // nope + for i := 0; i < 10; { // nope: missing increment + } + for i := 0; i < 10; i-- { // nope: negative increment + } + for i := 0; ; i++ { // nope: missing comparison + } + for i := 0; i <= 10; i++ { // nope: wrong comparison + } + for ; i < 10; i++ { // nope: missing init + } + for s.i = 0; s.i < 10; s.i++ { // nope: not an ident + } + for i := 0; i < 10; i++ { // nope: takes address of i + println(&i) 
+ } + for i := 0; i < 10; i++ { // nope: increments i + i++ + } + for i := 0; i < 10; i++ { // nope: assigns i + i = 8 + } +} + +func f() int { return 0 } diff --git a/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go new file mode 100644 index 00000000000..6116ce14838 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go @@ -0,0 +1,148 @@ +package slicescontains + +import "slices" + +var _ = slices.Contains[[]int] // force import of "slices" to avoid duplicate import edits + +func nopeNoBreak(slice []int, needle int) { + for i := range slice { + if slice[i] == needle { + println("found") + } + } +} + +func rangeIndex(slice []int, needle int) { + for i := range slice { // want "Loop can be simplified using slices.Contains" + if slice[i] == needle { + println("found") + break + } + } +} + +func rangeValue(slice []int, needle int) { + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + println("found") + break + } + } +} + +func returns(slice []int, needle int) { + for i := range slice { // want "Loop can be simplified using slices.Contains" + if slice[i] == needle { + println("found") + return + } + } +} + +func assignTrueBreak(slice []int, needle int) { + found := false + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + found = true + break + } + } + print(found) +} + +func assignFalseBreak(slice []int, needle int) { // TODO: treat this specially like booleanTrue + found := true + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + found = false + break + } + } + print(found) +} + +func assignFalseBreakInSelectSwitch(slice []int, needle int) { + // Exercise RangeStmt in CommClause, CaseClause. 
+ select { + default: + found := false + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + found = true + break + } + } + print(found) + } + switch { + default: + found := false + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + found = true + break + } + } + print(found) + } +} + +func returnTrue(slice []int, needle int) bool { + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + return true + } + } + return false +} + +func returnFalse(slice []int, needle int) bool { + for _, elem := range slice { // want "Loop can be simplified using slices.Contains" + if elem == needle { + return false + } + } + return true +} + +func containsFunc(slice []int, needle int) bool { + for _, elem := range slice { // want "Loop can be simplified using slices.ContainsFunc" + if predicate(elem) { + return true + } + } + return false +} + +func nopeLoopBodyHasFreeContinuation(slice []int, needle int) bool { + for _, elem := range slice { + if predicate(elem) { + if needle == 7 { + continue // this statement defeats loop elimination + } + return true + } + } + return false +} + +func predicate(int) bool + +// Regression tests for bad fixes when needle +// and haystack have different types (#71313): + +func nopeNeedleHaystackDifferentTypes(x any, args []error) { + for _, arg := range args { + if arg == x { + return + } + } +} + +func nopeNeedleHaystackDifferentTypes2(x error, args []any) { + for _, arg := range args { + if arg == x { + return + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden new file mode 100644 index 00000000000..2d67395f203 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden @@ -0,0 +1,104 @@ +package slicescontains + +import "slices" + +var _ = slices.Contains[[]int] // force import of "slices" to avoid duplicate import edits + +func nopeNoBreak(slice []int, needle int) { + for i := range slice { + if slice[i] == needle { + println("found") + } + } +} + +func rangeIndex(slice []int, needle int) { + if slices.Contains(slice, needle) { + println("found") + } +} + +func rangeValue(slice []int, needle int) { + if slices.Contains(slice, needle) { + println("found") + } +} + +func returns(slice []int, needle int) { + if slices.Contains(slice, needle) { + println("found") + return + } +} + +func assignTrueBreak(slice []int, needle int) { + found := slices.Contains(slice, needle) + print(found) +} + +func assignFalseBreak(slice []int, needle int) { // TODO: treat this specially like booleanTrue + found := true + if slices.Contains(slice, needle) { + found = false + } + print(found) +} + +func assignFalseBreakInSelectSwitch(slice []int, needle int) { + // Exercise RangeStmt in CommClause, CaseClause. 
+ select { + default: + found := slices.Contains(slice, needle) + print(found) + } + switch { + default: + found := slices.Contains(slice, needle) + print(found) + } +} + +func returnTrue(slice []int, needle int) bool { + return slices.Contains(slice, needle) +} + +func returnFalse(slice []int, needle int) bool { + return !slices.Contains(slice, needle) +} + +func containsFunc(slice []int, needle int) bool { + return slices.ContainsFunc(slice, predicate) +} + +func nopeLoopBodyHasFreeContinuation(slice []int, needle int) bool { + for _, elem := range slice { + if predicate(elem) { + if needle == 7 { + continue // this statement defeats loop elimination + } + return true + } + } + return false +} + +func predicate(int) bool + +// Regression tests for bad fixes when needle +// and haystack have different types (#71313): + +func nopeNeedleHaystackDifferentTypes(x any, args []error) { + for _, arg := range args { + if arg == x { + return + } + } +} + +func nopeNeedleHaystackDifferentTypes2(x error, args []any) { + for _, arg := range args { + if arg == x { + return + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go new file mode 100644 index 00000000000..a710d06f2fe --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go @@ -0,0 +1,36 @@ +package slicesdelete + +var g struct{ f []int } + +func slicesdelete(test, other []byte, i int) { + const k = 1 + _ = append(test[:i], test[i+1:]...) // want "Replace append with slices.Delete" + + _ = append(test[:i+1], test[i+2:]...) // want "Replace append with slices.Delete" + + _ = append(test[:i+1], test[i+1:]...) // not deleting any slice elements + + _ = append(test[:i], test[i-1:]...) // not deleting any slice elements + + _ = append(test[:i-1], test[i:]...) // want "Replace append with slices.Delete" + + _ = append(test[:i-2], test[i+1:]...) // want "Replace append with slices.Delete" + + _ = append(test[:i-2], other[i+1:]...) // different slices "test" and "other" + + _ = append(test[:i-2], other[i+1+k:]...) // cannot verify a < b + + _ = append(test[:i-2], test[11:]...) // cannot verify a < b + + _ = append(test[:1], test[3:]...) // want "Replace append with slices.Delete" + + _ = append(g.f[:i], g.f[i+k:]...) // want "Replace append with slices.Delete" + + _ = append(test[:3], test[i+1:]...) // cannot verify a < b + + _ = append(test[:i-4], test[i-1:]...) // want "Replace append with slices.Delete" + + _ = append(test[:1+2], test[3+4:]...) // want "Replace append with slices.Delete" + + _ = append(test[:1+2], test[i-1:]...) // cannot verify a < b +} diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden new file mode 100644 index 00000000000..9b2ba9a0b80 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden @@ -0,0 +1,52 @@ +package slicesdelete + +import "slices" + +import "slices" + +import "slices" + +import "slices" + +import "slices" + +import "slices" + +import "slices" + +import "slices" + +var g struct{ f []int } + +func slicesdelete(test, other []byte, i int) { + const k = 1 + _ = slices.Delete(test, i, i+1) // want "Replace append with slices.Delete" + + _ = slices.Delete(test, i+1, i+2) // want "Replace append with slices.Delete" + + _ = append(test[:i+1], test[i+1:]...) 
// not deleting any slice elements + + _ = append(test[:i], test[i-1:]...) // not deleting any slice elements + + _ = slices.Delete(test, i-1, i) // want "Replace append with slices.Delete" + + _ = slices.Delete(test, i-2, i+1) // want "Replace append with slices.Delete" + + _ = append(test[:i-2], other[i+1:]...) // different slices "test" and "other" + + _ = append(test[:i-2], other[i+1+k:]...) // cannot verify a < b + + _ = append(test[:i-2], test[11:]...) // cannot verify a < b + + _ = slices.Delete(test, 1, 3) // want "Replace append with slices.Delete" + + _ = slices.Delete(g.f, i, i+k) // want "Replace append with slices.Delete" + + _ = append(test[:3], test[i+1:]...) // cannot verify a < b + + _ = slices.Delete(test, i-4, i-1) // want "Replace append with slices.Delete" + + _ = slices.Delete(test, 1+2, 3+4) // want "Replace append with slices.Delete" + + _ = append(test[:1+2], test[i-1:]...) // cannot verify a < b +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go index fce3e006328..53d15746839 100644 --- a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go @@ -6,8 +6,6 @@ type myint int func _(s []myint) { sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) // want "sort.Slice can be modernized using slices.Sort" - - sort.SliceStable(s, func(i, j int) bool { return s[i] < s[j] }) // want "sort.SliceStable can be modernized using slices.SortStable" } func _(x *struct{ s []int }) { diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden index 176ae66d204..d97636fd311 100644 --- a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden @@ -4,16 +4,12 @@ import "slices" import "slices" -import "slices" - import "sort" type myint int func _(s []myint) { slices.Sort(s) // want "sort.Slice can be modernized using slices.Sort" - - slices.SortStable(s) // want "sort.SliceStable can be modernized using slices.SortStable" } func _(x *struct{ s []int }) { diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice_dot.go b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice_dot.go new file mode 100644 index 00000000000..8502718c1a5 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice_dot.go @@ -0,0 +1,26 @@ +package sortslice + +import . 
"slices" +import "sort" + +func _(s []myint) { + sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) // want "sort.Slice can be modernized using slices.Sort" +} + +func _(x *struct{ s []int }) { + sort.Slice(x.s, func(first, second int) bool { return x.s[first] < x.s[second] }) // want "sort.Slice can be modernized using slices.Sort" +} + +func _(s []int) { + sort.Slice(s, func(i, j int) bool { return s[i] > s[j] }) // nope: wrong comparison operator +} + +func _(s []int) { + sort.Slice(s, func(i, j int) bool { return s[j] < s[i] }) // nope: wrong index var +} + +func _(s2 []struct{ x int }) { + sort.Slice(s2, func(i, j int) bool { return s2[i].x < s2[j].x }) // nope: not a simple index operation +} + +func _() { Clip([]int{}) } diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice_dot.go.golden new file mode 100644 index 00000000000..45c056d24fb --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice_dot.go.golden @@ -0,0 +1,26 @@ +package sortslice + +import . "slices" +import "sort" + +func _(s []myint) { + Sort(s) // want "sort.Slice can be modernized using slices.Sort" +} + +func _(x *struct{ s []int }) { + Sort(x.s) // want "sort.Slice can be modernized using slices.Sort" +} + +func _(s []int) { + sort.Slice(s, func(i, j int) bool { return s[i] > s[j] }) // nope: wrong comparison operator +} + +func _(s []int) { + sort.Slice(s, func(i, j int) bool { return s[j] < s[i] }) // nope: wrong index var +} + +func _(s2 []struct{ x int }) { + sort.Slice(s2, func(i, j int) bool { return s2[i].x < s2[j].x }) // nope: not a simple index operation +} + +func _() { Clip([]int{}) } diff --git a/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq.go b/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq.go new file mode 100644 index 00000000000..4f533ed22bc --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq.go @@ -0,0 +1,42 @@ +//go:build go1.24 + +package splitseq + +import ( + "bytes" + "strings" +) + +func _() { + for _, line := range strings.Split("", "") { // want "Ranging over SplitSeq is more efficient" + println(line) + } + for i, line := range strings.Split("", "") { // nope: uses index var + println(i, line) + } + for i, _ := range strings.Split("", "") { // nope: uses index var + println(i) + } + for i := range strings.Split("", "") { // nope: uses index var + println(i) + } + for _ = range strings.Split("", "") { // want "Ranging over SplitSeq is more efficient" + } + for range strings.Split("", "") { // want "Ranging over SplitSeq is more efficient" + } + for range bytes.Split(nil, nil) { // want "Ranging over SplitSeq is more efficient" + } + { + lines := strings.Split("", "") // want "Ranging over SplitSeq is more efficient" + for _, line := range lines { + println(line) + } + } + { + lines := strings.Split("", "") // nope: lines is used not just by range + for _, line := range lines { + println(line) + } + println(lines) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq.go.golden b/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq.go.golden new file mode 100644 index 00000000000..d10e0e8e564 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq.go.golden @@ -0,0 +1,42 @@ +//go:build go1.24 + +package splitseq + +import ( + "bytes" + "strings" +) + +func _() { + for line := range strings.SplitSeq("", "") 
{ // want "Ranging over SplitSeq is more efficient" + println(line) + } + for i, line := range strings.Split("", "") { // nope: uses index var + println(i, line) + } + for i, _ := range strings.Split("", "") { // nope: uses index var + println(i) + } + for i := range strings.Split("", "") { // nope: uses index var + println(i) + } + for range strings.SplitSeq("", "") { // want "Ranging over SplitSeq is more efficient" + } + for range strings.SplitSeq("", "") { // want "Ranging over SplitSeq is more efficient" + } + for range bytes.SplitSeq(nil, nil) { // want "Ranging over SplitSeq is more efficient" + } + { + lines := strings.SplitSeq("", "") // want "Ranging over SplitSeq is more efficient" + for line := range lines { + println(line) + } + } + { + lines := strings.Split("", "") // nope: lines is used not just by range + for _, line := range lines { + println(line) + } + println(lines) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq_go123.go b/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq_go123.go new file mode 100644 index 00000000000..c3e86bb2ed9 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/splitseq/splitseq_go123.go @@ -0,0 +1 @@ +package splitseq diff --git a/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext.go b/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext.go new file mode 100644 index 00000000000..8f29e6f6098 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext.go @@ -0,0 +1 @@ +package testingcontext diff --git a/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext_test.go b/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext_test.go new file mode 100644 index 00000000000..e4f2b6257ab --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext_test.go @@ -0,0 +1,78 @@ +package testingcontext + +import ( + "context" + + "testing" +) + +func Test(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) // want "context.WithCancel can be modernized using t.Context" + defer cancel() + _ = ctx + + func() { + ctx, cancel := context.WithCancel(context.Background()) // Nope. scope of defer is not the testing func. + defer cancel() + _ = ctx + }() + + { + ctx, cancel := context.WithCancel(context.TODO()) // want "context.WithCancel can be modernized using t.Context" + defer cancel() + _ = ctx + var t int // not in scope of the call to WithCancel + _ = t + } + + { + ctx := context.Background() + ctx, cancel := context.WithCancel(context.Background()) // Nope. ctx is redeclared. + defer cancel() + _ = ctx + } + + { + var t int + ctx, cancel := context.WithCancel(context.Background()) // Nope. t is shadowed. + defer cancel() + _ = ctx + _ = t + } + + t.Run("subtest", func(t2 *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) // want "context.WithCancel can be modernized using t2.Context" + defer cancel() + _ = ctx + }) +} + +func TestAlt(t2 *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) // want "context.WithCancel can be modernized using t2.Context" + defer cancel() + _ = ctx +} + +func Testnot(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) // Nope. Not a test func. 
+ defer cancel() + _ = ctx +} + +func Benchmark(b *testing.B) { + ctx, cancel := context.WithCancel(context.Background()) // want "context.WithCancel can be modernized using b.Context" + defer cancel() + _ = ctx + + b.Run("subtest", func(b2 *testing.B) { + ctx, cancel := context.WithCancel(context.Background()) // want "context.WithCancel can be modernized using b2.Context" + defer cancel() + _ = ctx + }) +} + +func Fuzz(f *testing.F) { + ctx, cancel := context.WithCancel(context.Background()) // want "context.WithCancel can be modernized using f.Context" + defer cancel() + _ = ctx +} diff --git a/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext_test.go.golden b/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext_test.go.golden new file mode 100644 index 00000000000..c1d6bf0fce4 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/testingcontext/testingcontext_test.go.golden @@ -0,0 +1,71 @@ +package testingcontext + +import ( + "context" + + "testing" +) + +func Test(t *testing.T) { + ctx := t.Context() + _ = ctx + + func() { + ctx, cancel := context.WithCancel(context.Background()) // Nope. scope of defer is not the testing func. + defer cancel() + _ = ctx + }() + + { + ctx := t.Context() + _ = ctx + var t int // not in scope of the call to WithCancel + _ = t + } + + { + ctx := context.Background() + ctx, cancel := context.WithCancel(context.Background()) // Nope. ctx is redeclared. + defer cancel() + _ = ctx + } + + { + var t int + ctx, cancel := context.WithCancel(context.Background()) // Nope. t is shadowed. + defer cancel() + _ = ctx + _ = t + } + + t.Run("subtest", func(t2 *testing.T) { + ctx := t2.Context() + _ = ctx + }) +} + +func TestAlt(t2 *testing.T) { + ctx := t2.Context() + _ = ctx +} + +func Testnot(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) // Nope. Not a test func. + defer cancel() + _ = ctx +} + +func Benchmark(b *testing.B) { + ctx := b.Context() + _ = ctx + + b.Run("subtest", func(b2 *testing.B) { + ctx := b2.Context() + _ = ctx + }) +} + +func Fuzz(f *testing.F) { + ctx := f.Context() + _ = ctx +} diff --git a/gopls/internal/analysis/modernize/testingcontext.go b/gopls/internal/analysis/modernize/testingcontext.go new file mode 100644 index 00000000000..9bdc11ccfca --- /dev/null +++ b/gopls/internal/analysis/modernize/testingcontext.go @@ -0,0 +1,253 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" +) + +// The testingContext pass replaces calls to context.WithCancel from within +// tests to a use of testing.{T,B,F}.Context(), added in Go 1.24. 
+// +// Specifically, the testingContext pass suggests to replace: +// +// ctx, cancel := context.WithCancel(context.Background()) // or context.TODO +// defer cancel() +// +// with: +// +// ctx := t.Context() +// +// provided: +// +// - ctx and cancel are declared by the assignment +// - the deferred call is the only use of cancel +// - the call is within a test or subtest function +// - the relevant testing.{T,B,F} is named and not shadowed at the call +func testingContext(pass *analysis.Pass) { + if !analysisinternal.Imports(pass.Pkg, "testing") { + return + } + + info := pass.TypesInfo + + // checkCall finds eligible calls to context.WithCancel to replace. + checkCall := func(cur cursor.Cursor) { + call := cur.Node().(*ast.CallExpr) + obj := typeutil.Callee(info, call) + if !analysisinternal.IsFunctionNamed(obj, "context", "WithCancel") { + return + } + + // Have: context.WithCancel(arg) + + arg, ok := call.Args[0].(*ast.CallExpr) + if !ok { + return + } + if obj := typeutil.Callee(info, arg); !analysisinternal.IsFunctionNamed(obj, "context", "Background", "TODO") { + return + } + + // Have: context.WithCancel(context.{Background,TODO}()) + + parent := cur.Parent() + assign, ok := parent.Node().(*ast.AssignStmt) + if !ok || assign.Tok != token.DEFINE { + return + } + + // Have: a, b := context.WithCancel(context.{Background,TODO}()) + + // Check that both a and b are declared, not redeclarations. + var lhs []types.Object + for _, expr := range assign.Lhs { + id, ok := expr.(*ast.Ident) + if !ok { + return + } + obj, ok := info.Defs[id] + if !ok { + return + } + lhs = append(lhs, obj) + } + + next, ok := parent.NextSibling() + if !ok { + return + } + defr, ok := next.Node().(*ast.DeferStmt) + if !ok { + return + } + if soleUse(info, lhs[1]) != defr.Call.Fun { + return + } + + // Have: + // a, b := context.WithCancel(context.{Background,TODO}()) + // defer b() + + // Check that we are in a test func. + var testObj types.Object // relevant testing.{T,B,F}, or nil + if curFunc, ok := enclosingFunc(cur); ok { + switch n := curFunc.Node().(type) { + case *ast.FuncLit: + if e, idx := curFunc.Edge(); e == edge.CallExpr_Args && idx == 1 { + // Have: call(..., func(...) { ...context.WithCancel(...)... }) + obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr)) + if (analysisinternal.IsMethodNamed(obj, "testing", "T", "Run") || + analysisinternal.IsMethodNamed(obj, "testing", "B", "Run")) && + len(n.Type.Params.List[0].Names) == 1 { + + // Have tb.Run(..., func(..., tb *testing.[TB]) { ...context.WithCancel(...)... } + testObj = info.Defs[n.Type.Params.List[0].Names[0]] + } + } + + case *ast.FuncDecl: + testObj = isTestFn(info, n) + } + } + + if testObj != nil { + // Have a test function. Check that we can resolve the relevant + // testing.{T,B,F} at the current position. 
+ if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj { + pass.Report(analysis.Diagnostic{ + Pos: call.Fun.Pos(), + End: call.Fun.End(), + Category: "testingcontext", + Message: fmt.Sprintf("context.WithCancel can be modernized using %s.Context", testObj.Name()), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace context.WithCancel with %s.Context", testObj.Name()), + TextEdits: []analysis.TextEdit{{ + Pos: assign.Pos(), + End: defr.End(), + NewText: fmt.Appendf(nil, "%s := %s.Context()", lhs[0].Name(), testObj.Name()), + }}, + }}, + }) + } + } + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.24") { + for cur := range curFile.Preorder((*ast.CallExpr)(nil)) { + checkCall(cur) + } + } +} + +// soleUse returns the ident that refers to obj, if there is exactly one. +// +// TODO(rfindley): consider factoring to share with gopls/internal/refactor/inline. +func soleUse(info *types.Info, obj types.Object) (sole *ast.Ident) { + // This is not efficient, but it is called infrequently. + for id, obj2 := range info.Uses { + if obj2 == obj { + if sole != nil { + return nil // not unique + } + sole = id + } + } + return sole +} + +// isTestFn checks whether fn is a test function (TestX, BenchmarkX, FuzzX), +// returning the corresponding types.Object of the *testing.{T,B,F} argument. +// Returns nil if fn is a test function, but the testing.{T,B,F} argument is +// unnamed (or _). +// +// TODO(rfindley): consider handling the case of an unnamed argument, by adding +// an edit to give the argument a name. +// +// Adapted from go/analysis/passes/tests. +// TODO(rfindley): consider refactoring to share logic. +func isTestFn(info *types.Info, fn *ast.FuncDecl) types.Object { + // Want functions with 0 results and 1 parameter. + if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 || + fn.Type.Params == nil || + len(fn.Type.Params.List) != 1 || + len(fn.Type.Params.List[0].Names) != 1 { + + return nil + } + + prefix := testKind(fn.Name.Name) + if prefix == "" { + return nil + } + + if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 { + return nil // test functions must not be generic + } + + obj := info.Defs[fn.Type.Params.List[0].Names[0]] + if obj == nil { + return nil // e.g. _ *testing.T + } + + var name string + switch prefix { + case "Test": + name = "T" + case "Benchmark": + name = "B" + case "Fuzz": + name = "F" + } + + if !analysisinternal.IsPointerToNamed(obj.Type(), "testing", name) { + return nil + } + return obj +} + +// testKind returns "Test", "Benchmark", or "Fuzz" if name is a valid resp. +// test, benchmark, or fuzz function name. Otherwise, testKind returns "". +// +// Adapted from go/analysis/passes/tests.isTestName. +func testKind(name string) string { + var prefix string + switch { + case strings.HasPrefix(name, "Test"): + prefix = "Test" + case strings.HasPrefix(name, "Benchmark"): + prefix = "Benchmark" + case strings.HasPrefix(name, "Fuzz"): + prefix = "Fuzz" + } + if prefix == "" { + return "" + } + suffix := name[len(prefix):] + if len(suffix) == 0 { + // "Test" is ok. 
+ return prefix + } + r, _ := utf8.DecodeRuneInString(suffix) + if unicode.IsLower(r) { + return "" + } + return prefix +} diff --git a/gopls/internal/analysis/unusedparams/doc.go b/gopls/internal/analysis/unusedparams/doc.go index 07e43c0d084..16d318e86fa 100644 --- a/gopls/internal/analysis/unusedparams/doc.go +++ b/gopls/internal/analysis/unusedparams/doc.go @@ -31,4 +31,6 @@ // arguments at call sites, while taking care to preserve any side // effects in the argument expressions; see // https://github.com/golang/tools/releases/tag/gopls%2Fv0.14. +// +// This analyzer ignores generated code. package unusedparams diff --git a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go new file mode 100644 index 00000000000..fdbe64d9e90 --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go @@ -0,0 +1,15 @@ +// Code generated with somegen DO NOT EDIT. +// +// Because this file is generated, there should be no diagnostics +// reported for any unused parameters. + +package generatedcode + +// generatedInterface exists to ensure that the generated code +// is considered when determining whether parameters are used +// in non-generated code. +type generatedInterface interface{ n(f bool) } + +func a(x bool) { println() } + +var v = func(x bool) { println() } diff --git a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden new file mode 100644 index 00000000000..fdbe64d9e90 --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/generatedcode.go.golden @@ -0,0 +1,15 @@ +// Code generated with somegen DO NOT EDIT. +// +// Because this file is generated, there should be no diagnostics +// reported for any unused parameters. + +package generatedcode + +// generatedInterface exists to ensure that the generated code +// is considered when determining whether parameters are used +// in non-generated code. +type generatedInterface interface{ n(f bool) } + +func a(x bool) { println() } + +var v = func(x bool) { println() } diff --git a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go new file mode 100644 index 00000000000..fe0ef94afbb --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go @@ -0,0 +1,20 @@ +package generatedcode + +// This file does not have the generated code comment. +// It exists to ensure that generated code is considered +// when determining whether or not function parameters +// are used. + +type implementsGeneratedInterface struct{} + +// The f parameter should not be reported as unused, +// because this method implements the parent interface defined +// in the generated code. +func (implementsGeneratedInterface) n(f bool) { + // The body must not be empty, otherwise unusedparams will + // not report the unused parameter regardles of the + // interface. 
+ println() +} + +func b(x bool) { println() } // want "unused parameter: x" diff --git a/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden new file mode 100644 index 00000000000..170dc85785c --- /dev/null +++ b/gopls/internal/analysis/unusedparams/testdata/src/generatedcode/nongeneratedcode.go.golden @@ -0,0 +1,20 @@ +package generatedcode + +// This file does not have the generated code comment. +// It exists to ensure that generated code is considered +// when determining whether or not function parameters +// are used. + +type implementsGeneratedInterface struct{} + +// The f parameter should not be reported as unused, +// because this method implements the parent interface defined +// in the generated code. +func (implementsGeneratedInterface) n(f bool) { + // The body must not be empty, otherwise unusedparams will + // not report the unused parameter regardles of the + // interface. + println() +} + +func b(_ bool) { println() } // want "unused parameter: x" diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go index 2b74328021d..2986dfd6e41 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -16,6 +16,8 @@ import ( "golang.org/x/tools/gopls/internal/util/moreslices" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -137,59 +139,72 @@ func run(pass *analysis.Pass) (any, error) { } } - // Check each non-address-taken function's parameters are all used. - filter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - cursor.Root(inspect).Inspect(filter, func(c cursor.Cursor, push bool) bool { - // (We always return true so that we visit nested FuncLits.) - + // Inspect each file to see if it is generated. + // + // We do not want to report unused parameters in generated code itself, + // however we need to include generated code in the overall analysis as + // it may be calling functions in non-generated code. + files := []ast.Node{(*ast.File)(nil)} + cursor.Root(inspect).Inspect(files, func(c cursor.Cursor, push bool) bool { if !push { return true } - var ( - fn types.Object // function symbol (*Func, possibly *Var for a FuncLit) - ftype *ast.FuncType - body *ast.BlockStmt - ) - switch n := c.Node().(type) { - case *ast.FuncDecl: - // We can't analyze non-Go functions. - if n.Body == nil { - return true - } + isGenerated := ast.IsGenerated(c.Node().(*ast.File)) - // Ignore exported functions and methods: we - // must assume they may be address-taken in - // another package. - if n.Name.IsExported() { + // Descend into the file, check each non-address-taken function's parameters + // are all used. + funcs := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + } + c.Inspect(funcs, func(c cursor.Cursor, push bool) bool { + // (We always return true so that we visit nested FuncLits.) + if !push { return true } - // Ignore methods that match the name of any - // interface method declared in this package, - // as the method's signature may need to conform - // to the interface. 
- if n.Recv != nil && unexportedIMethodNames[n.Name.Name] { - return true - } + var ( + fn types.Object // function symbol (*Func, possibly *Var for a FuncLit) + ftype *ast.FuncType + body *ast.BlockStmt + ) + switch n := c.Node().(type) { + case *ast.FuncDecl: + // We can't analyze non-Go functions. + if n.Body == nil { + return true + } + + // Ignore exported functions and methods: we + // must assume they may be address-taken in + // another package. + if n.Name.IsExported() { + return true + } + + // Ignore methods that match the name of any + // interface method declared in this package, + // as the method's signature may need to conform + // to the interface. + if n.Recv != nil && unexportedIMethodNames[n.Name.Name] { + return true + } - fn = pass.TypesInfo.Defs[n.Name].(*types.Func) - ftype, body = n.Type, n.Body - - case *ast.FuncLit: - // Find the symbol for the variable (if any) - // to which the FuncLit is bound. - // (We don't bother to allow ParenExprs.) - switch parent := c.Parent().Node().(type) { - case *ast.AssignStmt: - // f = func() {...} - // f := func() {...} - for i, rhs := range parent.Rhs { - if rhs == n { - if id, ok := parent.Lhs[i].(*ast.Ident); ok { + fn = pass.TypesInfo.Defs[n.Name].(*types.Func) + ftype, body = n.Type, n.Body + + case *ast.FuncLit: + // Find the symbol for the variable (if any) + // to which the FuncLit is bound. + // (We don't bother to allow ParenExprs.) + switch parent := c.Parent().Node().(type) { + case *ast.AssignStmt: + // f = func() {...} + // f := func() {...} + if e, idx := c.Edge(); e == edge.AssignStmt_Rhs { + // Inv: n == AssignStmt.Rhs[idx] + if id, ok := parent.Lhs[idx].(*ast.Ident); ok { fn = pass.TypesInfo.ObjectOf(id) // Edge case: f = func() {...} @@ -200,109 +215,117 @@ func run(pass *analysis.Pass) (any, error) { if fn == nil && id.Name == "_" { // Edge case: _ = func() {...} - // has no var. Fake one. - fn = types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n)) + // has no local var. Fake one. + v := types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n)) + typesinternal.SetVarKind(v, typesinternal.LocalVar) + fn = v } } - break } - } - case *ast.ValueSpec: - // var f = func() { ... } - // (unless f is an exported package-level var) - for i, val := range parent.Values { - if val == n { - v := pass.TypesInfo.Defs[parent.Names[i]] - if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) { - fn = v + case *ast.ValueSpec: + // var f = func() { ... } + // (unless f is an exported package-level var) + for i, val := range parent.Values { + if val == n { + v := pass.TypesInfo.Defs[parent.Names[i]] + if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) { + fn = v + } + break } - break } } - } - ftype, body = n.Type, n.Body - } + ftype, body = n.Type, n.Body + } - // Ignore address-taken functions and methods: unused - // parameters may be needed to conform to a func type. - if fn == nil || len(usesOutsideCall[fn]) > 0 { - return true - } + // Ignore address-taken functions and methods: unused + // parameters may be needed to conform to a func type. + if fn == nil || len(usesOutsideCall[fn]) > 0 { + return true + } - // If there are no parameters, there are no unused parameters. - if ftype.Params.NumFields() == 0 { - return true - } + // If there are no parameters, there are no unused parameters. + if ftype.Params.NumFields() == 0 { + return true + } - // To reduce false positives, ignore functions with an - // empty or panic body. 
- // - // We choose not to ignore functions whose body is a - // single return statement (as earlier versions did) - // func f() { return } - // func f() { return g(...) } - // as we suspect that was just heuristic to reduce - // false positives in the earlier unsound algorithm. - switch len(body.List) { - case 0: - // Empty body. Although the parameter is - // unnecessary, it's pretty obvious to the - // reader that that's the case, so we allow it. - return true // func f() {} - case 1: - if stmt, ok := body.List[0].(*ast.ExprStmt); ok { - // We allow a panic body, as it is often a - // placeholder for a future implementation: - // func f() { panic(...) } - if call, ok := stmt.X.(*ast.CallExpr); ok { - if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" { - return true + // To reduce false positives, ignore functions with an + // empty or panic body. + // + // We choose not to ignore functions whose body is a + // single return statement (as earlier versions did) + // func f() { return } + // func f() { return g(...) } + // as we suspect that was just heuristic to reduce + // false positives in the earlier unsound algorithm. + switch len(body.List) { + case 0: + // Empty body. Although the parameter is + // unnecessary, it's pretty obvious to the + // reader that that's the case, so we allow it. + return true // func f() {} + case 1: + if stmt, ok := body.List[0].(*ast.ExprStmt); ok { + // We allow a panic body, as it is often a + // placeholder for a future implementation: + // func f() { panic(...) } + if call, ok := stmt.X.(*ast.CallExpr); ok { + if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" { + return true + } } } } - } - // Report each unused parameter. - for _, field := range ftype.Params.List { - for _, id := range field.Names { - if id.Name == "_" { - continue - } - param := pass.TypesInfo.Defs[id].(*types.Var) - if !usedVars[param] { - start, end := field.Pos(), field.End() - if len(field.Names) > 1 { - start, end = id.Pos(), id.End() + // Don't report diagnostics on generated files. + if isGenerated { + return true + } + + // Report each unused parameter. + for _, field := range ftype.Params.List { + for _, id := range field.Names { + if id.Name == "_" { + continue } - // This diagnostic carries both an edit-based fix to - // rename the unused parameter, and a command-based fix - // to remove it (see golang.RemoveUnusedParameter). - pass.Report(analysis.Diagnostic{ - Pos: start, - End: end, - Message: fmt.Sprintf("unused parameter: %s", id.Name), - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: `Rename parameter to "_"`, - TextEdits: []analysis.TextEdit{{ - Pos: id.Pos(), - End: id.End(), - NewText: []byte("_"), - }}, - }, - { - Message: fmt.Sprintf("Remove unused parameter %q", id.Name), - // No TextEdits => computed by gopls command + param := pass.TypesInfo.Defs[id].(*types.Var) + if !usedVars[param] { + start, end := field.Pos(), field.End() + if len(field.Names) > 1 { + start, end = id.Pos(), id.End() + } + + // This diagnostic carries both an edit-based fix to + // rename the unused parameter, and a command-based fix + // to remove it (see golang.RemoveUnusedParameter). 
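As an illustrative sketch only (hypothetical names, not part of this change), here is how the heuristics described in the comments above apply to ordinary, non-generated code:

package example

func Exported(unused int) { println() } // skipped: exported, may be address-taken in another package

func stub(unused int) {} // skipped: empty body, so the unused parameter is clearly deliberate

func todo(unused int) { panic("unimplemented") } // skipped: a panic body is treated as a placeholder

func helper(used, unused int) { println(used) } // reported: "unused parameter: unused", with fixes to rename it to "_" or remove it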
+ pass.Report(analysis.Diagnostic{ + Pos: start, + End: end, + Message: fmt.Sprintf("unused parameter: %s", id.Name), + Category: FixCategory, + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: `Rename parameter to "_"`, + TextEdits: []analysis.TextEdit{{ + Pos: id.Pos(), + End: id.End(), + NewText: []byte("_"), + }}, + }, + { + Message: fmt.Sprintf("Remove unused parameter %q", id.Name), + // No TextEdits => computed by gopls command + }, }, - }, - }) + }) + } } } - } + return true + }) return true }) return nil, nil diff --git a/gopls/internal/analysis/unusedparams/unusedparams_test.go b/gopls/internal/analysis/unusedparams/unusedparams_test.go index 1e2d8851b8b..e943c20d898 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams_test.go +++ b/gopls/internal/analysis/unusedparams/unusedparams_test.go @@ -13,5 +13,5 @@ import ( func Test(t *testing.T) { testdata := analysistest.TestData() - analysistest.RunWithSuggestedFixes(t, testdata, unusedparams.Analyzer, "a", "typeparams") + analysistest.RunWithSuggestedFixes(t, testdata, unusedparams.Analyzer, "a", "generatedcode", "typeparams") } diff --git a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go index 8421824b2d3..f53fd8cc091 100644 --- a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go +++ b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go @@ -65,6 +65,17 @@ func commentAbove() { v := "s" // want `declared (and|but) not used` } +func commentBelow() { + v := "s" // want `declared (and|but) not used` + // v is a variable +} + +func commentSpaceBelow() { + v := "s" // want `declared (and|but) not used` + + // v is a variable +} + func fBool() bool { return true } diff --git a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden index 8f8d6128ea8..075d7c28b42 100644 --- a/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden +++ b/gopls/internal/analysis/unusedvariable/testdata/src/assign/a.go.golden @@ -50,6 +50,14 @@ func commentAbove() { // v is a variable } +func commentBelow() { + // v is a variable +} + +func commentSpaceBelow() { + // v is a variable +} + func fBool() bool { return true } diff --git a/gopls/internal/analysis/unusedvariable/unusedvariable.go b/gopls/internal/analysis/unusedvariable/unusedvariable.go index 5e4dd52be7e..15bcd43d873 100644 --- a/gopls/internal/analysis/unusedvariable/unusedvariable.go +++ b/gopls/internal/analysis/unusedvariable/unusedvariable.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/util/safetoken" ) const Doc = `check for unused variables and suggest fixes` @@ -37,7 +38,7 @@ var unusedVariableRegexp = []*regexp.Regexp{ regexp.MustCompile("^declared and not used: (.*)$"), // Go 1.23+ } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { for _, typeErr := range pass.TypeErrors { for _, re := range unusedVariableRegexp { match := re.FindStringSubmatch(typeErr.Msg) @@ -113,7 +114,7 @@ func runForError(pass *analysis.Pass, err types.Error, name string) error { continue } - fixes := removeVariableFromAssignment(path, stmt, ident) + fixes := removeVariableFromAssignment(pass.Fset, path, stmt, ident) // fixes may be nil if len(fixes) > 0 { diag.SuggestedFixes = fixes @@ -164,7 +165,7 @@ func removeVariableFromSpec(pass *analysis.Pass, path 
[]ast.Node, stmt *ast.Valu // Find parent DeclStmt and delete it for _, node := range path { if declStmt, ok := node.(*ast.DeclStmt); ok { - edits := deleteStmtFromBlock(path, declStmt) + edits := deleteStmtFromBlock(pass.Fset, path, declStmt) if len(edits) == 0 { return nil // can this happen? } @@ -198,7 +199,7 @@ func removeVariableFromSpec(pass *analysis.Pass, path []ast.Node, stmt *ast.Valu } } -func removeVariableFromAssignment(path []ast.Node, stmt *ast.AssignStmt, ident *ast.Ident) []analysis.SuggestedFix { +func removeVariableFromAssignment(fset *token.FileSet, path []ast.Node, stmt *ast.AssignStmt, ident *ast.Ident) []analysis.SuggestedFix { // The only variable in the assignment is unused if len(stmt.Lhs) == 1 { // If LHS has only one expression to be valid it has to have 1 expression @@ -221,7 +222,7 @@ func removeVariableFromAssignment(path []ast.Node, stmt *ast.AssignStmt, ident * } // RHS does not have any side effects, delete the whole statement - edits := deleteStmtFromBlock(path, stmt) + edits := deleteStmtFromBlock(fset, path, stmt) if len(edits) == 0 { return nil // can this happen? } @@ -252,7 +253,7 @@ func suggestedFixMessage(name string) string { return fmt.Sprintf("Remove variable %s", name) } -func deleteStmtFromBlock(path []ast.Node, stmt ast.Stmt) []analysis.TextEdit { +func deleteStmtFromBlock(fset *token.FileSet, path []ast.Node, stmt ast.Stmt) []analysis.TextEdit { // Find innermost enclosing BlockStmt. var block *ast.BlockStmt for i := range path { @@ -282,6 +283,31 @@ func deleteStmtFromBlock(path []ast.Node, stmt ast.Stmt) []analysis.TextEdit { end = block.List[nodeIndex+1].Pos() } + // Account for comments within the block containing the statement + // TODO(adonovan): when golang/go#20744 is addressed, query the AST + // directly for comments between stmt.End() and end. For now we + // must scan the entire file's comments (though we could binary search). + astFile := path[len(path)-1].(*ast.File) + currFile := fset.File(end) + stmtEndLine := safetoken.Line(currFile, stmt.End()) +outer: + for _, cg := range astFile.Comments { + for _, co := range cg.List { + if stmt.End() <= co.Pos() && co.Pos() <= end { + coLine := safetoken.Line(currFile, co.Pos()) + // If a comment exists within the current block, after the unused variable statement, + // and before the next statement, we shouldn't delete it. + if coLine > stmtEndLine { + end = co.Pos() + break outer + } + if co.Pos() > end { + break outer + } + } + } + } + return []analysis.TextEdit{ { Pos: stmt.Pos(), diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index 4c5abbc23ce..d570c0a46ae 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -1131,6 +1131,11 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { TypeErrors: apkg.typeErrors, ResultOf: inputs, Report: func(d analysis.Diagnostic) { + // Assert that SuggestedFixes are well formed. 
+ if err := analysisinternal.ValidateFixes(apkg.pkg.FileSet(), analyzer, d.SuggestedFixes); err != nil { + bug.Reportf("invalid SuggestedFixes: %v", err) + d.SuggestedFixes = nil + } diagnostic, err := toGobDiagnostic(posToLocation, analyzer, d) if err != nil { // Don't bug.Report here: these errors all originate in diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index 068fa70b4ed..d094c535d7a 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -492,13 +492,69 @@ func (b *typeCheckBatch) importPackage(ctx context.Context, mp *metadata.Package return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904) (using GOPACKAGESDRIVER)", pkg.Name(), item.Name, id, item.Path) } else { - return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", - pkg.Name(), item.Name, id, item.Path) - + // There's a package in the export data with the same path as the + // imported package, but a different name. + // + // This is observed to occur (very frequently!) in telemetry, yet + // we don't yet have a plausible explanation: any self import or + // circular import should have resulted in a broken import, which + // can't be referenced by export data. (Any type qualified by the + // broken import name will be invalid.) + // + // However, there are some mechanisms that could potentially be + // involved: + // 1. go/types will synthesize package names based on the import + // path for fake packages (but as mentioned above, I don't think + // these can be referenced by export data.) + // 2. Test variants have the same path as non-test variant. Could + // that somehow be involved? (I don't see how, particularly using + // the go list driver, but nevertheless it's worth considering.) + // 3. Command-line arguments and main packages may have special + // handling that we don't fully understand. + // Try to sort these potential causes into unique stacks, as well + // as a few other pathological scenarios. + report := func() error { + return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", + pkg.Name(), item.Name, id, item.Path) + } + impliedName := "" + if i := strings.LastIndex(item.Path, "/"); i >= 0 { + impliedName = item.Path[i+1:] + } + switch { + case pkg.Name() == "": + return report() + case item.Name == "": + return report() + case metadata.IsCommandLineArguments(mp.ID): + return report() + case mp.ForTest != "": + return report() + case len(mp.CompiledGoFiles) == 0: + return report() + case len(mp.Errors) > 0: + return report() + case impliedName != "" && impliedName != string(mp.Name): + return report() + case len(mp.CompiledGoFiles) != len(mp.GoFiles): + return report() + case mp.Module == nil: + return report() + case mp.Name == "main": + return report() + default: + return report() + } } } } else { - id = importLookup(PackagePath(item.Path)) + var alt PackageID + id, alt = importLookup(PackagePath(item.Path)) + if alt != "" { + // Any bug leading to this scenario would have already been reported + // in importLookup. + return fmt.Errorf("inconsistent metadata during import: for package path %q, found both IDs %q and %q", item.Path, id, alt) + } var err error pkg, err = b.getImportPackage(ctx, id) if err != nil { @@ -615,8 +671,12 @@ func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageH // a given package path, based on the forward transitive closure of the initial // package (id). 
// +// If the second result is non-empty, it is another ID discovered in the import +// graph for the same package path. This means the import graph is +// incoherent--see #63822 and the long comment below. +// // The resulting function is not concurrency safe. -func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath) PackageID { +func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath) (id, altID PackageID) { assert(mp != nil, "nil metadata") // This function implements an incremental depth first scan through the @@ -630,6 +690,10 @@ func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath mp.PkgPath: mp.ID, } + // altIDs records alternative IDs for the given path, to report inconsistent + // metadata. + var altIDs map[PackagePath]PackageID + // pending is a FIFO queue of package metadata that has yet to have its // dependencies fully scanned. // Invariant: all entries in pending are already mapped in impMap. @@ -645,13 +709,82 @@ func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath if prevID, ok := impMap[depPath]; ok { // debugging #63822 if prevID != depID { + if altIDs == nil { + altIDs = make(map[PackagePath]PackageID) + } + if _, ok := altIDs[depPath]; !ok { + altIDs[depPath] = depID + } prev := source.Metadata(prevID) curr := source.Metadata(depID) switch { case prev == nil || curr == nil: bug.Reportf("inconsistent view of dependencies (missing dep)") case prev.ForTest != curr.ForTest: - bug.Reportf("inconsistent view of dependencies (mismatching ForTest)") + // This case is unfortunately understood to be possible. + // + // To explain this, consider a package a_test testing the package + // a, and for brevity denote by b' the intermediate test variant of + // the package b, which is created for the import graph of a_test, + // if b imports a. + // + // Now imagine that we have the following import graph, where + // higher packages import lower ones. + // + // a_test + // / \ + // b' c + // / \ / + // a d + // + // In this graph, there is one intermediate test variant b', + // because b imports a and so b' must hold the test variant import. + // + // Now, imagine that an on-disk change (perhaps due to a branch + // switch) affects the above import graph such that d imports a. + // + // a_test + // / \ + // b' c* + // / \ / + // / d* + // a---/ + // + // In this case, c and d should really be intermediate test + // variants, because they reach a. However, suppose that gopls does + // not know this yet (as indicated by '*'). + // + // Now suppose that the metadata of package c is invalidated, for + // example due to a change in an unrelated import or an added file. + // This will invalidate the metadata of c and a_test (but NOT b), + // and now gopls observes this graph: + // + // a_test + // / \ + // b' c' + // /| | + // / d d' + // a-----/ + // + // That is: a_test now sees c', which sees d', but since b was not + // invalidated, gopls still thinks that b' imports d (not d')! + // + // The problem, of course, is that gopls never observed the change + // to d, which would have invalidated b. This may be due to racing + // file watching events, in which case the problem should + // self-correct when gopls sees the change to d, or it may be due + // to d being outside the coverage of gopls' file watching glob + // patterns, or it may be due to buggy or entirely absent + // client-side file watching. + // + // TODO(rfindley): fix this, one way or another. 
It would be hard + or impossible to repair gopls' state here, during type checking. + However, we could perhaps reload metadata in Snapshot.load until + we achieve a consistent state, or better, until the loaded state + is consistent with our view of the filesystem, by making the Go + command report digests of the files it reads. Both of those are + trickier than they may seem, and have significant performance + implications. default: bug.Reportf("inconsistent view of dependencies") } @@ -673,16 +806,16 @@ func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath return id, found } - return func(pkgPath PackagePath) PackageID { + return func(pkgPath PackagePath) (id, altID PackageID) { if id, ok := impMap[pkgPath]; ok { - return id + return id, altIDs[pkgPath] } for len(pending) > 0 { if id, found := search(pkgPath); found { - return id + return id, altIDs[pkgPath] } } - return "" + return "", "" } } @@ -1290,6 +1423,7 @@ func (s *Snapshot) typerefData(ctx context.Context, id PackageID, imports map[Im return data, nil } else if err != filecache.ErrNotFound { bug.Reportf("internal error reading typerefs data: %v", err) + // Unexpected error: treat as cache miss, and fall through. } pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), parsego.Full&^parser.ParseComments, true, cgfs...) diff --git a/gopls/internal/cache/diagnostics.go b/gopls/internal/cache/diagnostics.go index 68c1632594f..d43c2f395dd 100644 --- a/gopls/internal/cache/diagnostics.go +++ b/gopls/internal/cache/diagnostics.go @@ -31,7 +31,7 @@ type InitializationError struct { func byURI(d *Diagnostic) protocol.DocumentURI { return d.URI } // For use in maps.Group. -// An Diagnostic corresponds to an LSP Diagnostic. +// A Diagnostic corresponds to an LSP Diagnostic. // https://microsoft.github.io/language-server-protocol/specification#diagnostic // // It is (effectively) gob-serializable; see {encode,decode}Diagnostics. diff --git a/gopls/internal/cache/errors.go b/gopls/internal/cache/errors.go index 26747a63d33..39eb8387702 100644 --- a/gopls/internal/cache/errors.go +++ b/gopls/internal/cache/errors.go @@ -270,15 +270,10 @@ func toSourceDiagnostic(srcAnalyzer *settings.Analyzer, gobDiag *gobDiagnostic) related = append(related, protocol.DiagnosticRelatedInformation(gobRelated)) } - severity := srcAnalyzer.Severity() - if severity == 0 { - severity = protocol.SeverityWarning - } - diag := &Diagnostic{ URI: gobDiag.Location.URI, Range: gobDiag.Location.Range, - Severity: severity, + Severity: srcAnalyzer.Severity(), Code: gobDiag.Code, CodeHref: gobDiag.CodeHref, Source: DiagnosticSource(gobDiag.Source), @@ -458,14 +453,14 @@ func parseGoListImportCycleError(ctx context.Context, e packages.Error, mp *meta // Search file imports for the import that is causing the import cycle. 
for _, imp := range pgf.File.Imports { if imp.Path.Value == circImp { - rng, err := pgf.NodeMappedRange(imp) + rng, err := pgf.NodeRange(imp) if err != nil { return nil, nil } return &Diagnostic{ URI: pgf.URI, - Range: rng.Range(), + Range: rng, Severity: protocol.SeverityError, Source: ListError, Message: msg, diff --git a/gopls/internal/cache/filemap.go b/gopls/internal/cache/filemap.go index c826141ed98..1f1fd947d71 100644 --- a/gopls/internal/cache/filemap.go +++ b/gopls/internal/cache/filemap.go @@ -5,6 +5,7 @@ package cache import ( + "iter" "path/filepath" "golang.org/x/tools/gopls/internal/file" @@ -77,9 +78,9 @@ func (m *fileMap) get(key protocol.DocumentURI) (file.Handle, bool) { return m.files.Get(key) } -// foreach calls f for each (uri, fh) in the map. -func (m *fileMap) foreach(f func(uri protocol.DocumentURI, fh file.Handle)) { - m.files.Range(f) +// all returns the sequence of (uri, fh) entries in the map. +func (m *fileMap) all() iter.Seq2[protocol.DocumentURI, file.Handle] { + return m.files.All() } // set stores the given file handle for key, updating overlays and directories @@ -130,9 +131,9 @@ func (m *fileMap) delete(key protocol.DocumentURI) { // getOverlays returns a new unordered array of overlay files. func (m *fileMap) getOverlays() []*overlay { var overlays []*overlay - m.overlays.Range(func(_ protocol.DocumentURI, o *overlay) { + for _, o := range m.overlays.All() { overlays = append(overlays, o) - }) + } return overlays } @@ -143,9 +144,9 @@ func (m *fileMap) getOverlays() []*overlay { func (m *fileMap) getDirs() *persistent.Set[string] { if m.dirs == nil { m.dirs = new(persistent.Set[string]) - m.files.Range(func(u protocol.DocumentURI, _ file.Handle) { - m.addDirs(u) - }) + for uri := range m.files.All() { + m.addDirs(uri) + } } return m.dirs } diff --git a/gopls/internal/cache/filemap_test.go b/gopls/internal/cache/filemap_test.go index 13f2c1a9ccd..24b3a19d108 100644 --- a/gopls/internal/cache/filemap_test.go +++ b/gopls/internal/cache/filemap_test.go @@ -83,9 +83,9 @@ func TestFileMap(t *testing.T) { } var gotFiles []string - m.foreach(func(uri protocol.DocumentURI, _ file.Handle) { + for uri := range m.all() { gotFiles = append(gotFiles, normalize(uri.Path())) - }) + } sort.Strings(gotFiles) if diff := cmp.Diff(test.wantFiles, gotFiles); diff != "" { t.Errorf("Files mismatch (-want +got):\n%s", diff) @@ -100,9 +100,9 @@ func TestFileMap(t *testing.T) { } var gotDirs []string - m.getDirs().Range(func(dir string) { + for dir := range m.getDirs().All() { gotDirs = append(gotDirs, normalize(dir)) - }) + } sort.Strings(gotDirs) if diff := cmp.Diff(test.wantDirs, gotDirs); diff != "" { t.Errorf("Dirs mismatch (-want +got):\n%s", diff) diff --git a/gopls/internal/cache/imports.go b/gopls/internal/cache/imports.go index c467a851f8f..31a1b9d42a5 100644 --- a/gopls/internal/cache/imports.go +++ b/gopls/internal/cache/imports.go @@ -8,6 +8,7 @@ import ( "context" "fmt" "sync" + "testing" "time" "golang.org/x/tools/gopls/internal/file" @@ -15,6 +16,7 @@ import ( "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/keys" "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/modindex" ) // refreshTimer implements delayed asynchronous refreshing of state. @@ -59,11 +61,8 @@ func (t *refreshTimer) schedule() { if t.timer == nil { // Don't refresh more than twice per minute. - delay := 30 * time.Second // Don't spend more than ~2% of the time refreshing. 
- if adaptive := 50 * t.duration; adaptive > delay { - delay = adaptive - } + delay := max(30*time.Second, 50*t.duration) t.timer = time.AfterFunc(delay, func() { start := time.Now() t.mu.Lock() @@ -149,6 +148,71 @@ func newImportsState(backgroundCtx context.Context, modCache *sharedModCache, en return s } +// modcacheState holds a modindex.Index and controls its updates +type modcacheState struct { + dir string // GOMODCACHE + refreshTimer *refreshTimer + mu sync.Mutex + index *modindex.Index +} + +// newModcacheState constructs a new modcacheState for goimports. +// The returned state is automatically updated until [modcacheState.stopTimer] is called. +func newModcacheState(dir string) *modcacheState { + s := &modcacheState{ + dir: dir, + } + s.index, _ = modindex.ReadIndex(dir) + s.refreshTimer = newRefreshTimer(s.refreshIndex) + go s.refreshIndex() + return s +} + +func (s *modcacheState) GetIndex() (*modindex.Index, error) { + s.mu.Lock() + defer s.mu.Unlock() + ix := s.index + if ix == nil || len(ix.Entries) == 0 { + var err error + // this should only happen near the beginning of a session + // (or in tests) + ix, err = modindex.ReadIndex(s.dir) + if err != nil { + return nil, fmt.Errorf("ReadIndex %w", err) + } + if !testing.Testing() { + return ix, nil + } + if ix == nil || len(ix.Entries) == 0 { + err = modindex.Create(s.dir) + if err != nil { + return nil, fmt.Errorf("creating index %w", err) + } + ix, err = modindex.ReadIndex(s.dir) + if err != nil { + return nil, fmt.Errorf("read index after create %w", err) + } + s.index = ix + } + } + return s.index, nil +} + +func (s *modcacheState) refreshIndex() { + ok, err := modindex.Update(s.dir) + if err != nil || !ok { + return + } + // read the new index + s.mu.Lock() + defer s.mu.Unlock() + s.index, _ = modindex.ReadIndex(s.dir) +} + +func (s *modcacheState) stopTimer() { + s.refreshTimer.stop() +} + // stopTimer stops scheduled refreshes of this imports state. func (s *importsState) stopTimer() { s.refreshTimer.stop() diff --git a/gopls/internal/cache/load.go b/gopls/internal/cache/load.go index 873cef56a2b..140cbc45490 100644 --- a/gopls/internal/cache/load.go +++ b/gopls/internal/cache/load.go @@ -262,11 +262,11 @@ func (s *Snapshot) load(ctx context.Context, allowNetwork AllowNetwork, scopes . s.mu.Lock() // Assert the invariant s.packages.Get(id).m == s.meta.metadata[id]. - s.packages.Range(func(id PackageID, ph *packageHandle) { + for id, ph := range s.packages.All() { if s.meta.Packages[id] != ph.mp { panic("inconsistent metadata") } - }) + } // Compute the minimal metadata updates (for Clone) // required to preserve the above invariant. diff --git a/gopls/internal/cache/parsego/file.go b/gopls/internal/cache/parsego/file.go index ea8db19b4ff..41fd1937ec1 100644 --- a/gopls/internal/cache/parsego/file.go +++ b/gopls/internal/cache/parsego/file.go @@ -76,12 +76,6 @@ func (pgf *File) PosRange(start, end token.Pos) (protocol.Range, error) { return pgf.Mapper.PosRange(pgf.Tok, start, end) } -// PosMappedRange returns a MappedRange for the token.Pos interval in this file. -// A MappedRange can be converted to any other form. -func (pgf *File) PosMappedRange(start, end token.Pos) (protocol.MappedRange, error) { - return pgf.Mapper.PosMappedRange(pgf.Tok, start, end) -} - // PosLocation returns a protocol Location for the token.Pos interval in this file. 
func (pgf *File) PosLocation(start, end token.Pos) (protocol.Location, error) { return pgf.Mapper.PosLocation(pgf.Tok, start, end) @@ -97,12 +91,6 @@ func (pgf *File) NodeOffsets(node ast.Node) (start int, end int, _ error) { return safetoken.Offsets(pgf.Tok, node.Pos(), node.End()) } -// NodeMappedRange returns a MappedRange for the ast.Node interval in this file. -// A MappedRange can be converted to any other form. -func (pgf *File) NodeMappedRange(node ast.Node) (protocol.MappedRange, error) { - return pgf.Mapper.NodeMappedRange(pgf.Tok, node) -} - // NodeLocation returns a protocol Location for the ast.Node interval in this file. func (pgf *File) NodeLocation(node ast.Node) (protocol.Location, error) { return pgf.Mapper.PosLocation(pgf.Tok, node.Pos(), node.End()) diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index a6f4118e23e..a7fb618f679 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -8,10 +8,10 @@ import ( "context" "errors" "fmt" + "maps" "os" "path/filepath" "slices" - "sort" "strconv" "strings" "sync" @@ -23,6 +23,7 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/label" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/persistent" "golang.org/x/tools/gopls/internal/vulncheck" @@ -218,7 +219,7 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * ModCache: s.cache.modCache.dirCache(def.folder.Env.GOMODCACHE), } if def.folder.Options.VerboseOutput { - pe.Logf = func(format string, args ...interface{}) { + pe.Logf = func(format string, args ...any) { event.Log(ctx, fmt.Sprintf(format, args...)) } } @@ -237,6 +238,9 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * viewDefinition: def, importsState: newImportsState(backgroundCtx, s.cache.modCache, pe), } + if def.folder.Options.ImportsSource != settings.ImportsSourceOff { + v.modcacheState = newModcacheState(def.folder.Env.GOMODCACHE) + } s.snapshotWG.Add(1) v.snapshot = &Snapshot{ @@ -772,6 +776,25 @@ func (s *Session) DidModifyFiles(ctx context.Context, modifications []file.Modif // changed on disk. checkViews := false + // Hack: collect folders from existing views. + // TODO(golang/go#57979): we really should track folders independent of + // Views, but since we always have a default View for each folder, this + // works for now. + var folders []*Folder // preserve folder order + workspaceFileGlobsSet := make(map[string]bool) + seen := make(map[*Folder]unit) + for _, v := range s.views { + if _, ok := seen[v.folder]; ok { + continue + } + seen[v.folder] = unit{} + folders = append(folders, v.folder) + for _, glob := range v.folder.Options.WorkspaceFiles { + workspaceFileGlobsSet[glob] = true + } + } + workspaceFileGlobs := slices.Collect(maps.Keys(workspaceFileGlobsSet)) + changed := make(map[protocol.DocumentURI]file.Handle) for _, c := range modifications { fh := mustReadFile(ctx, s, c.URI) @@ -787,7 +810,7 @@ func (s *Session) DidModifyFiles(ctx context.Context, modifications []file.Modif // TODO(rfindley): go.work files need not be named "go.work" -- we need to // check each view's source to handle the case of an explicit GOWORK value. // Write a test that fails, and fix this. 
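// Editor's note (minimal runnable sketch, not part of the patch): the
// session.go hunks above lean on newer stdlib helpers — maps.Keys plus
// slices.Collect to turn a set into a slice, slices.Sort in place of
// sort.Slice, and maps.Copy in place of a hand-written copy loop. Toy data.
package main

import (
	"fmt"
	"maps"
	"slices"
)

func main() {
	globSet := map[string]bool{"WORKSPACE": true, "**/BUILD.bazel": true}

	globs := slices.Collect(maps.Keys(globSet)) // set -> slice (unspecified order)
	slices.Sort(globs)                          // sort for determinism

	patterns := map[string]struct{}{"**/*.go": {}}
	maps.Copy(patterns, map[string]struct{}{"**/go.mod": {}}) // merge src into dst

	fmt.Println(globs, len(patterns))
}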
- if (isGoWork(c.URI) || isGoMod(c.URI)) && (c.Action == file.Save || c.OnDisk) { + if (isGoWork(c.URI) || isGoMod(c.URI) || isWorkspaceFile(c.URI, workspaceFileGlobs)) && (c.Action == file.Save || c.OnDisk) { checkViews = true } @@ -814,28 +837,12 @@ func (s *Session) DidModifyFiles(ctx context.Context, modifications []file.Modif } if checkViews { - // Hack: collect folders from existing views. - // TODO(golang/go#57979): we really should track folders independent of - // Views, but since we always have a default View for each folder, this - // works for now. - var folders []*Folder // preserve folder order - seen := make(map[*Folder]unit) - for _, v := range s.views { - if _, ok := seen[v.folder]; ok { - continue - } - seen[v.folder] = unit{} - folders = append(folders, v.folder) - } - var openFiles []protocol.DocumentURI for _, o := range s.Overlays() { openFiles = append(openFiles, o.URI()) } // Sort for determinism. - sort.Slice(openFiles, func(i, j int) bool { - return openFiles[i] < openFiles[j] - }) + slices.Sort(openFiles) // TODO(rfindley): can we avoid running the go command (go env) // synchronously to change processing? Can we assume that the env did not @@ -1084,11 +1091,12 @@ func (b brokenFile) Content() ([]byte, error) { return nil, b.err } // // This set includes // 1. all go.mod and go.work files in the workspace; and -// 2. for each Snapshot, its modules (or directory for ad-hoc views). In +// 2. all files defined by the WorkspaceFiles option in BuildOptions (to support custom GOPACKAGESDRIVERS); and +// 3. for each Snapshot, its modules (or directory for ad-hoc views). In // module mode, this is the set of active modules (and for VS Code, all // workspace directories within them, due to golang/go#42348). // -// The watch for workspace go.work and go.mod files in (1) is sufficient to +// The watch for workspace files in (1) is sufficient to // capture changes to the repo structure that may affect the set of views. // Whenever this set changes, we reload the workspace and invalidate memoized // files. @@ -1124,9 +1132,7 @@ func (s *Session) FileWatchingGlobPatterns(ctx context.Context) map[protocol.Rel if err != nil { continue // view is shut down; continue with others } - for k, v := range snapshot.fileWatchingGlobPatterns() { - patterns[k] = v - } + maps.Copy(patterns, snapshot.fileWatchingGlobPatterns()) release() } return patterns diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go index de4a52ff6cb..c341ac6e85a 100644 --- a/gopls/internal/cache/snapshot.go +++ b/gopls/internal/cache/snapshot.go @@ -183,9 +183,9 @@ type Snapshot struct { // vulns maps each go.mod file's URI to its known vulnerabilities. vulns *persistent.Map[protocol.DocumentURI, *vulncheck.Result] - // compilerOptDetails describes the packages for which we want - // compiler optimization details to be included in the diagnostics. - compilerOptDetails map[metadata.PackageID]unit + // compilerOptDetails is the set of directories whose packages + // and tests need compiler optimization details in the diagnostics. 
+ compilerOptDetails map[protocol.DocumentURI]unit // Concurrent type checking: // typeCheckMu guards the ongoing type checking batch, and reference count of @@ -198,7 +198,7 @@ type Snapshot struct { var _ memoize.RefCounted = (*Snapshot)(nil) // snapshots are reference-counted -func (s *Snapshot) awaitPromise(ctx context.Context, p *memoize.Promise) (interface{}, error) { +func (s *Snapshot) awaitPromise(ctx context.Context, p *memoize.Promise) (any, error) { return p.Get(ctx, s) } @@ -344,11 +344,11 @@ func (s *Snapshot) Templates() map[protocol.DocumentURI]file.Handle { defer s.mu.Unlock() tmpls := map[protocol.DocumentURI]file.Handle{} - s.files.foreach(func(k protocol.DocumentURI, fh file.Handle) { + for k, fh := range s.files.all() { if s.FileKind(fh) == file.Tmpl { tmpls[k] = fh } - }) + } return tmpls } @@ -799,6 +799,10 @@ func (s *Snapshot) fileWatchingGlobPatterns() map[protocol.RelativePattern]unit patterns[workPattern] = unit{} } + for _, glob := range s.Options().WorkspaceFiles { + patterns[protocol.RelativePattern{Pattern: glob}] = unit{} + } + extensions := "go,mod,sum,work" for _, ext := range s.Options().TemplateExtensions { extensions += "," + ext @@ -864,13 +868,13 @@ func (s *Snapshot) addKnownSubdirs(patterns map[protocol.RelativePattern]unit, w s.mu.Lock() defer s.mu.Unlock() - s.files.getDirs().Range(func(dir string) { + for dir := range s.files.getDirs().All() { for _, wsDir := range wsDirs { if pathutil.InDir(wsDir, dir) { patterns[protocol.RelativePattern{Pattern: filepath.ToSlash(dir)}] = unit{} } } - }) + } } // watchSubdirs reports whether gopls should request separate file watchers for @@ -912,11 +916,11 @@ func (s *Snapshot) filesInDir(uri protocol.DocumentURI) []protocol.DocumentURI { return nil } var files []protocol.DocumentURI - s.files.foreach(func(uri protocol.DocumentURI, _ file.Handle) { + for uri := range s.files.all() { if pathutil.InDir(dir, uri.Path()) { files = append(files, uri) } - }) + } return files } @@ -1029,13 +1033,11 @@ func (s *Snapshot) clearShouldLoad(scopes ...loadScope) { case packageLoadScope: scopePath := PackagePath(scope) var toDelete []PackageID - s.shouldLoad.Range(func(id PackageID, pkgPaths []PackagePath) { - for _, pkgPath := range pkgPaths { - if pkgPath == scopePath { - toDelete = append(toDelete, id) - } + for id, pkgPaths := range s.shouldLoad.All() { + if slices.Contains(pkgPaths, scopePath) { + toDelete = append(toDelete, id) } - }) + } for _, id := range toDelete { s.shouldLoad.Delete(id) } @@ -1183,7 +1185,7 @@ func (s *Snapshot) reloadWorkspace(ctx context.Context) { var scopes []loadScope var seen map[PackagePath]bool s.mu.Lock() - s.shouldLoad.Range(func(_ PackageID, pkgPaths []PackagePath) { + for _, pkgPaths := range s.shouldLoad.All() { for _, pkgPath := range pkgPaths { if seen == nil { seen = make(map[PackagePath]bool) @@ -1194,7 +1196,7 @@ func (s *Snapshot) reloadWorkspace(ctx context.Context) { seen[pkgPath] = true scopes = append(scopes, packageLoadScope(pkgPath)) } - }) + } s.mu.Unlock() if len(scopes) == 0 { @@ -1525,15 +1527,15 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f // Compute the new set of packages for which we want compiler // optimization details, after applying changed.CompilerOptDetails. 
if len(s.compilerOptDetails) > 0 || len(changed.CompilerOptDetails) > 0 { - newCompilerOptDetails := make(map[metadata.PackageID]unit) - for id := range s.compilerOptDetails { - if _, ok := changed.CompilerOptDetails[id]; !ok { - newCompilerOptDetails[id] = unit{} // no change + newCompilerOptDetails := make(map[protocol.DocumentURI]unit) + for dir := range s.compilerOptDetails { + if _, ok := changed.CompilerOptDetails[dir]; !ok { + newCompilerOptDetails[dir] = unit{} // no change } } - for id, want := range changed.CompilerOptDetails { + for dir, want := range changed.CompilerOptDetails { if want { - newCompilerOptDetails[id] = unit{} + newCompilerOptDetails[dir] = unit{} } } if len(newCompilerOptDetails) > 0 { @@ -1542,24 +1544,31 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f } reinit := false - - // Changes to vendor tree may require reinitialization, - // either because of an initialization error - // (e.g. "inconsistent vendoring detected"), or because - // one or more modules may have moved into or out of the - // vendor tree after 'go mod vendor' or 'rm -fr vendor/'. - // - // In this case, we consider the actual modification to see if was a creation - // or deletion. - // - // TODO(rfindley): revisit the location of this check. for _, mod := range changed.Modifications { + // Changes to vendor tree may require reinitialization, + // either because of an initialization error + // (e.g. "inconsistent vendoring detected"), or because + // one or more modules may have moved into or out of the + // vendor tree after 'go mod vendor' or 'rm -fr vendor/'. + // + // In this case, we consider the actual modification to see if was a creation + // or deletion. + // + // TODO(rfindley): revisit the location of this check. if inVendor(mod.URI) && (mod.Action == file.Create || mod.Action == file.Delete) || strings.HasSuffix(string(mod.URI), "/vendor/modules.txt") { reinit = true break } + + // Changes to workspace files, as a rule of thumb, should require reinitialization. Since their behavior + // is generally user-defined, we want to do something sensible by re-triggering a query to the active GOPACKAGESDRIVER, + // and reloading the state of the workspace. + if isWorkspaceFile(mod.URI, s.view.folder.Options.WorkspaceFiles) && (mod.Action == file.Save || mod.OnDisk) { + reinit = true + break + } } // Collect observed file handles for changed URIs from the old snapshot, if @@ -1886,13 +1895,13 @@ func deleteMostRelevantModFile(m *persistent.Map[protocol.DocumentURI, *memoize. var mostRelevant protocol.DocumentURI changedFile := changed.Path() - m.Range(func(modURI protocol.DocumentURI, _ *memoize.Promise) { + for modURI := range m.All() { if len(modURI) > len(mostRelevant) { if pathutil.InDir(modURI.DirPath(), changedFile) { mostRelevant = modURI } } - }) + } if mostRelevant != "" { m.Delete(mostRelevant) } @@ -2162,9 +2171,9 @@ func (s *Snapshot) setBuiltin(path string) { } // WantCompilerOptDetails reports whether to compute compiler -// optimization details for the specified package. -func (s *Snapshot) WantCompilerOptDetails(id metadata.PackageID) bool { - _, ok := s.compilerOptDetails[id] +// optimization details for packages and tests in the given directory. 
+func (s *Snapshot) WantCompilerOptDetails(dir protocol.DocumentURI) bool { + _, ok := s.compilerOptDetails[dir] return ok } diff --git a/gopls/internal/cache/source.go b/gopls/internal/cache/source.go new file mode 100644 index 00000000000..3e21c641651 --- /dev/null +++ b/gopls/internal/cache/source.go @@ -0,0 +1,403 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "context" + "log" + "maps" + "slices" + "strings" + + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/symbols" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/imports" +) + +// goplsSource is an imports.Source that provides import information using +// gopls and the module cache index. +type goplsSource struct { + S *Snapshot + envSource *imports.ProcessEnvSource + + // set by each invocation of ResolveReferences + ctx context.Context +} + +func (s *Snapshot) NewGoplsSource(is *imports.ProcessEnvSource) *goplsSource { + return &goplsSource{ + S: s, + envSource: is, + } +} + +func (s *goplsSource) LoadPackageNames(ctx context.Context, srcDir string, paths []imports.ImportPath) (map[imports.ImportPath]imports.PackageName, error) { + // TODO: use metadata graph. Aside from debugging, this is the only used of envSource + return s.envSource.LoadPackageNames(ctx, srcDir, paths) +} + +type result struct { + res *imports.Result + deprecated bool +} + +// ResolveReferences tries to find resolving imports in the workspace, and failing +// that, in the module cache. It uses heuristics to decide among alternatives. +// The heuristics will usually prefer a v2 version, if there is one. +// TODO: It does not take advantage of hints provided by the user: +// 1. syntactic context: pkg.Name().Foo +// 3. already imported files in the same module +func (s *goplsSource) ResolveReferences(ctx context.Context, filename string, missing imports.References) ([]*imports.Result, error) { + s.ctx = ctx + // get results from the workspace. There will at most one for each package name + fromWS, err := s.resolveWorkspaceReferences(filename, missing) + if err != nil { + return nil, err + } + // collect the ones that are still + needed := maps.Clone(missing) + for _, a := range fromWS { + if _, ok := needed[a.Package.Name]; ok { + delete(needed, a.Package.Name) + } + } + // when debug (below) is gone, change this to: if len(needed) == 0 {return fromWS, nil} + var fromCache []*result + if len(needed) != 0 { + var err error + fromCache, err = s.resolveCacheReferences(needed) + if err != nil { + return nil, err + } + // trim cans to one per missing package. + byPkgNm := make(map[string][]*result) + for _, c := range fromCache { + byPkgNm[c.res.Package.Name] = append(byPkgNm[c.res.Package.Name], c) + } + for k, v := range byPkgNm { + fromWS = append(fromWS, s.bestCache(k, v)) + } + } + const debug = false + if debug { // debugging. + // what does the old one find? 
+ old, err := s.envSource.ResolveReferences(ctx, filename, missing) + if err != nil { + log.Fatal(err) + } + log.Printf("fromCache:%d %s", len(fromCache), filename) + for i, c := range fromCache { + log.Printf("cans%d %#v %#v %v", i, c.res.Import, c.res.Package, c.deprecated) + } + for k, v := range missing { + for x := range v { + log.Printf("missing %s.%s", k, x) + } + } + for k, v := range needed { + for x := range v { + log.Printf("needed %s.%s", k, x) + } + } + + dbgpr := func(hdr string, v []*imports.Result) { + for i := 0; i < len(v); i++ { + log.Printf("%s%d %+v %+v", hdr, i, v[i].Import, v[i].Package) + } + } + + dbgpr("fromWS", fromWS) + dbgpr("old", old) + s.S.workspacePackages.Range(func(k PackageID, v PackagePath) { + log.Printf("workspacePackages[%s]=%s", k, v) + }) + // anything in ans with >1 matches? + seen := make(map[string]int) + for _, a := range fromWS { + seen[a.Package.Name]++ + } + for k, v := range seen { + if v > 1 { + log.Printf("saw %d %s", v, k) + for i, x := range fromWS { + if x.Package.Name == k { + log.Printf("%d: %+v %+v", i, x.Package, x.Import) + } + } + } + } + } + return fromWS, nil + +} + +func (s *goplsSource) resolveCacheReferences(missing imports.References) ([]*result, error) { + state := s.S.view.modcacheState + ix, err := state.GetIndex() + if err != nil { + event.Error(s.ctx, "resolveCacheReferences", err) + } + + found := make(map[string]*result) + for pkg, nms := range missing { + var ks []string + for k := range nms { + ks = append(ks, k) + } + cs := ix.LookupAll(pkg, ks...) // map[importPath][]Candidate + for k, cands := range cs { + res := found[k] + if res == nil { + res = &result{ + &imports.Result{ + Import: &imports.ImportInfo{ImportPath: k}, + Package: &imports.PackageInfo{Name: pkg, Exports: make(map[string]bool)}, + }, + false, + } + found[k] = res + } + for _, c := range cands { + res.res.Package.Exports[c.Name] = true + // The import path is deprecated if a symbol that would be used is deprecated + res.deprecated = res.deprecated || c.Deprecated + } + } + + } + var ans []*result + for _, x := range found { + ans = append(ans, x) + } + return ans, nil +} + +type found struct { + sym *symbols.Package + res *imports.Result +} + +func (s *goplsSource) resolveWorkspaceReferences(filename string, missing imports.References) ([]*imports.Result, error) { + uri := protocol.URIFromPath(filename) + mypkgs, err := s.S.MetadataForFile(s.ctx, uri) + if len(mypkgs) != 1 { + // what does this mean? can it happen? + } + mypkg := mypkgs[0] + // search the metadata graph for package ids correstponding to missing + g := s.S.MetadataGraph() + var ids []metadata.PackageID + var pkgs []*metadata.Package + for pid, pkg := range g.Packages { + // no test packages, except perhaps for ourselves + if pkg.ForTest != "" && pkg != mypkg { + continue + } + if missingWants(missing, pkg.Name) { + ids = append(ids, pid) + pkgs = append(pkgs, pkg) + } + } + // find the symbols in those packages + // the syms occur in the same order as the ids and the pkgs + syms, err := s.S.Symbols(s.ctx, ids...) 
+ if err != nil { + return nil, err + } + // keep track of used syms and found results by package name + // TODO: avoid import cycles (is current package in forward closure) + founds := make(map[string][]found) + for i := 0; i < len(ids); i++ { + nm := string(pkgs[i].Name) + if satisfies(syms[i], missing[nm]) { + got := &imports.Result{ + Import: &imports.ImportInfo{ + Name: "", + ImportPath: string(pkgs[i].PkgPath), + }, + Package: &imports.PackageInfo{ + Name: string(pkgs[i].Name), + Exports: missing[imports.PackageName(pkgs[i].Name)], + }, + } + founds[nm] = append(founds[nm], found{syms[i], got}) + } + } + var ans []*imports.Result + for _, v := range founds { + // make sure the elements of v are unique + // (Import.ImportPath or Package.Name must differ) + cmp := func(l, r found) int { + switch strings.Compare(l.res.Import.ImportPath, r.res.Import.ImportPath) { + case -1: + return -1 + case 1: + return 1 + } + return strings.Compare(l.res.Package.Name, r.res.Package.Name) + } + slices.SortFunc(v, cmp) + newv := make([]found, 0, len(v)) + newv = append(newv, v[0]) + for i := 1; i < len(v); i++ { + if cmp(v[i], v[i-1]) != 0 { + newv = append(newv, v[i]) + } + } + ans = append(ans, bestImport(filename, newv)) + } + return ans, nil +} + +// for each package name, choose one using heuristics +func bestImport(filename string, got []found) *imports.Result { + if len(got) == 1 { + return got[0].res + } + isTestFile := strings.HasSuffix(filename, "_test.go") + var leftovers []found + for _, g := range got { + // don't use _test packages unless isTestFile + testPkg := strings.HasSuffix(string(g.res.Package.Name), "_test") || strings.HasSuffix(string(g.res.Import.Name), "_test") + if testPkg && !isTestFile { + continue // no test covers this + } + if imports.CanUse(filename, g.sym.Files[0].DirPath()) { + leftovers = append(leftovers, g) + } + } + switch len(leftovers) { + case 0: + break // use got, they are all bad + case 1: + return leftovers[0].res // only one left + default: + got = leftovers // filtered some out + } + + // TODO: if there are versions (like /v2) prefer them + + // use distance to common ancestor with filename + // (TestDirectoryFilters_MultiRootImportScanning) + // filename is .../a/main.go, choices are + // .../a/hi/hi.go and .../b/hi/hi.go + longest := -1 + ix := -1 + for i := 0; i < len(got); i++ { + d := commonpref(filename, got[i].sym.Files[0].Path()) + if d > longest { + longest = d + ix = i + } + } + // it is possible that there were several tied, but we return the first + return got[ix].res +} + +// choose the best result for the package named nm from the module cache +func (s *goplsSource) bestCache(nm string, got []*result) *imports.Result { + if len(got) == 1 { + return got[0].res + } + // does the go.mod file choose one? + if ans := s.fromGoMod(got); ans != nil { + return ans + } + got = preferUndeprecated(got) + // want the best Import.ImportPath + // these are all for the package named nm, + // nm (probably) occurs in all the paths; + // choose the longest (after nm), so as to get /v2 + maxlen, which := -1, -1 + for i := 0; i < len(got); i++ { + ix := strings.Index(got[i].res.Import.ImportPath, nm) + if ix == -1 { + continue // now what? + } + cnt := len(got[i].res.Import.ImportPath) - ix + if cnt > maxlen { + maxlen = cnt + which = i + } + // what about ties? 
(e.g., /v2 and /v3) + } + if which >= 0 { + return got[which].res + } + return got[0].res // arbitrary guess +} + +// if go.mod requires one of the packages, return that +func (s *goplsSource) fromGoMod(got []*result) *imports.Result { + // should we use s.S.view.worsspaceModFiles, and the union of their requires? + // (note that there are no tests where it contains more than one) + modURI := s.S.view.gomod + modfh, ok := s.S.files.get(modURI) + if !ok { + return nil + } + parsed, err := s.S.ParseMod(s.ctx, modfh) + if err != nil { + return nil + } + reqs := parsed.File.Require + for _, g := range got { + for _, req := range reqs { + if strings.HasPrefix(g.res.Import.ImportPath, req.Syntax.Token[1]) { + return g.res + } + } + } + return nil +} + +func commonpref(filename string, path string) int { + k := 0 + for ; k < len(filename) && k < len(path) && filename[k] == path[k]; k++ { + } + return k +} + +func satisfies(pkg *symbols.Package, missing map[string]bool) bool { + syms := make(map[string]bool) + for _, x := range pkg.Symbols { + for _, s := range x { + syms[s.Name] = true + } + } + for k := range missing { + if !syms[k] { + return false + } + } + return true +} + +// does pkgPath potentially satisfy a missing reference? +func missingWants(missing imports.References, pkgPath metadata.PackageName) bool { + for k := range missing { + if string(k) == string(pkgPath) { + return true + } + } + return false +} + +// If there are both deprecated and undprecated ones +// then return only the undeprecated one +func preferUndeprecated(got []*result) []*result { + var ok []*result + for _, g := range got { + if !g.deprecated { + ok = append(ok, g) + } + } + if len(ok) > 0 { + return ok + } + return got +} diff --git a/gopls/internal/cache/testfuncs/tests.go b/gopls/internal/cache/testfuncs/tests.go index fca25e5db19..1182795b37b 100644 --- a/gopls/internal/cache/testfuncs/tests.go +++ b/gopls/internal/cache/testfuncs/tests.go @@ -8,8 +8,9 @@ import ( "go/ast" "go/constant" "go/types" - "regexp" "strings" + "unicode" + "unicode/utf8" "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/protocol" @@ -234,13 +235,6 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr return nil, nil } -var ( - reTest = regexp.MustCompile(`^Test([A-Z]|$)`) - reBenchmark = regexp.MustCompile(`^Benchmark([A-Z]|$)`) - reFuzz = regexp.MustCompile(`^Fuzz([A-Z]|$)`) - reExample = regexp.MustCompile(`^Example([A-Z]|$)`) -) - // isTestOrExample reports whether the given func is a testing func or an // example func (or neither). isTestOrExample returns (true, false) for testing // funcs, (false, true) for example funcs, and (false, false) otherwise. 
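// Editor's note (worked example for bestImport/commonpref in cache/source.go
// above; the paths are illustrative): when editing "/ws/a/main.go" with
// candidate packages defined in "/ws/a/hi/hi.go" and "/ws/b/hi/hi.go",
//
//	commonpref("/ws/a/main.go", "/ws/a/hi/hi.go") == 6 // shares "/ws/a/"
//	commonpref("/ws/a/main.go", "/ws/b/hi/hi.go") == 4 // shares only "/ws/"
//
// so the tiebreak prefers the candidate closest to the file being edited.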
@@ -248,7 +242,7 @@ func isTestOrExample(fn *types.Func) (isTest, isExample bool) { sig := fn.Type().(*types.Signature) if sig.Params().Len() == 0 && sig.Results().Len() == 0 { - return false, reExample.MatchString(fn.Name()) + return false, isTestName(fn.Name(), "Example") } kind, ok := testKind(sig) @@ -257,16 +251,33 @@ func isTestOrExample(fn *types.Func) (isTest, isExample bool) { } switch kind.Name() { case "T": - return reTest.MatchString(fn.Name()), false + return isTestName(fn.Name(), "Test"), false case "B": - return reBenchmark.MatchString(fn.Name()), false + return isTestName(fn.Name(), "Benchmark"), false case "F": - return reFuzz.MatchString(fn.Name()), false + return isTestName(fn.Name(), "Fuzz"), false default: return false, false // "can't happen" (see testKind) } } +// isTestName reports whether name is a valid test name for the test kind +// indicated by the given prefix ("Test", "Benchmark", etc.). +// +// Adapted from go/analysis/passes/tests. +func isTestName(name, prefix string) bool { + suffix, ok := strings.CutPrefix(name, prefix) + if !ok { + return false + } + if len(suffix) == 0 { + // "Test" is ok. + return true + } + r, _ := utf8.DecodeRuneInString(suffix) + return !unicode.IsLower(r) +} + // testKind returns the parameter type TypeName of a test, benchmark, or fuzz // function (one of testing.[TBF]). func testKind(sig *types.Signature) (*types.TypeName, bool) { diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index 5fb03cb1152..26f0de86125 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -15,6 +15,7 @@ import ( "errors" "fmt" "log" + "maps" "os" "os/exec" "path" @@ -26,7 +27,6 @@ import ( "sync" "time" - "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/cache/typerefs" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" @@ -106,8 +106,12 @@ type View struct { // background contexts created for this view. baseCtx context.Context + // importsState is for the old imports code importsState *importsState + // maintain the current module cache index + modcacheState *modcacheState + // pkgIndex is an index of package IDs, for efficient storage of typerefs. pkgIndex *typerefs.PackageIndex @@ -488,6 +492,7 @@ func (v *View) shutdown() { // Cancel the initial workspace load if it is still running. 
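// Editor's note (behaviour of the new isTestName helper above, shown on
// sample names):
//
//	isTestName("TestFoo", "Test")   == true  // suffix "Foo" starts with a non-lower-case rune
//	isTestName("Testify", "Test")   == false // suffix "ify" starts with a lower-case rune
//	isTestName("Test", "Test")      == true  // the bare prefix is a valid test name
//	isTestName("Fuzz", "Benchmark") == false // the prefix does not match at all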
v.cancelInitialWorkspaceLoad() v.importsState.stopTimer() + v.modcacheState.stopTimer() v.snapshotMu.Lock() if v.snapshot != nil { @@ -740,7 +745,7 @@ type StateChange struct { Files map[protocol.DocumentURI]file.Handle ModuleUpgrades map[protocol.DocumentURI]map[string]string Vulns map[protocol.DocumentURI]*vulncheck.Result - CompilerOptDetails map[metadata.PackageID]bool // package -> whether or not we want details + CompilerOptDetails map[protocol.DocumentURI]bool // package directory -> whether or not we want details } // InvalidateView processes the provided state change, invalidating any derived @@ -1143,9 +1148,7 @@ func (s *Snapshot) ModuleUpgrades(modfile protocol.DocumentURI) map[string]strin defer s.mu.Unlock() upgrades := map[string]string{} orig, _ := s.moduleUpgrades.Get(modfile) - for mod, ver := range orig { - upgrades[mod] = ver - } + maps.Copy(upgrades, orig) return upgrades } @@ -1171,7 +1174,7 @@ func (s *Snapshot) Vulnerabilities(modfiles ...protocol.DocumentURI) map[protoco defer s.mu.Unlock() if len(modfiles) == 0 { // empty means all modfiles - modfiles = s.vulns.Keys() + modfiles = slices.Collect(s.vulns.Keys()) } for _, modfile := range modfiles { vuln, _ := s.vulns.Get(modfile) diff --git a/gopls/internal/cache/workspace.go b/gopls/internal/cache/workspace.go index 07134b3da00..0621d17a537 100644 --- a/gopls/internal/cache/workspace.go +++ b/gopls/internal/cache/workspace.go @@ -13,6 +13,7 @@ import ( "golang.org/x/mod/modfile" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration/fake/glob" ) // isGoWork reports if uri is a go.work file. @@ -65,6 +66,21 @@ func isGoMod(uri protocol.DocumentURI) bool { return filepath.Base(uri.Path()) == "go.mod" } +// isWorkspaceFile reports if uri matches a set of globs defined in workspaceFiles +func isWorkspaceFile(uri protocol.DocumentURI, workspaceFiles []string) bool { + for _, workspaceFile := range workspaceFiles { + g, err := glob.Parse(workspaceFile) + if err != nil { + continue + } + + if g.Match(uri.Path()) { + return true + } + } + return false +} + // goModModules returns the URIs of "workspace" go.mod files defined by a // go.mod file. This set is defined to be the given go.mod file itself, as well // as the modfiles of any locally replaced modules in the go.mod file. 
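// Editor's note (hypothetical use of isWorkspaceFile above; the glob values
// are illustrative, not defaults): with a custom GOPACKAGESDRIVER one might
// configure "workspaceFiles": ["**/BUILD.bazel", "WORKSPACE"], and then:
//
//	globs := folder.Options.WorkspaceFiles
//	uri := protocol.URIFromPath("/ws/pkg/BUILD.bazel")
//	if isWorkspaceFile(uri, globs) {
//		checkViews = true // treated like go.mod/go.work: re-evaluate views and reload the workspace
//	}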
diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index d27542f79fb..a647b3198df 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -27,10 +27,12 @@ import ( "golang.org/x/tools/gopls/internal/lsprpc" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/protocol/semtok" "golang.org/x/tools/gopls/internal/server" "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/util/browser" bugpkg "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/moreslices" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/tool" @@ -299,7 +301,7 @@ func (app *Application) featureCommands() []tool.Application { &prepareRename{app: app}, &references{app: app}, &rename{app: app}, - &semtok{app: app}, + &semanticToken{app: app}, &signature{app: app}, &stats{app: app}, &symbols{app: app}, @@ -322,7 +324,6 @@ func (app *Application) connect(ctx context.Context) (*connection, error) { options := settings.DefaultOptions(app.options) svr = server.New(cache.NewSession(ctx, cache.New(nil)), client, options) ctx = protocol.WithClient(ctx, client) - } else { // remote netConn, err := lsprpc.ConnectToRemote(ctx, app.Remote) @@ -362,8 +363,8 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti params.Capabilities.TextDocument.SemanticTokens.Requests.Range = &protocol.Or_ClientSemanticTokensRequestOptions_range{Value: true} //params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true params.Capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} - params.Capabilities.TextDocument.SemanticTokens.TokenTypes = protocol.SemanticTypes() - params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = protocol.SemanticModifiers() + params.Capabilities.TextDocument.SemanticTokens.TokenTypes = moreslices.ConvertStrings[string](semtok.TokenTypes) + params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = moreslices.ConvertStrings[string](semtok.TokenModifiers) params.Capabilities.TextDocument.CodeAction = protocol.CodeActionClientCapabilities{ CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{ CodeActionKind: protocol.ClientCodeActionKindOptions{ @@ -376,7 +377,7 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti params.InitializationOptions = map[string]interface{}{ "symbolMatcher": string(opts.SymbolMatcher), } - if _, err := c.Server.Initialize(ctx, params); err != nil { + if c.initializeResult, err = c.Server.Initialize(ctx, params); err != nil { return err } if err := c.Server.Initialized(ctx, &protocol.InitializedParams{}); err != nil { @@ -388,6 +389,9 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti type connection struct { protocol.Server client *cmdClient + // initializeResult keep the initialize protocol response from server + // including server capabilities. + initializeResult *protocol.InitializeResult } // cmdClient defines the protocol.Client interface behavior of the gopls CLI tool. @@ -768,9 +772,6 @@ func (c *cmdClient) openFile(uri protocol.DocumentURI) *cmdFile { return c.getFile(uri) } -// TODO(adonovan): provide convenience helpers to: -// - map a (URI, protocol.Range) to a MappedRange; -// - parse a command-line argument to a MappedRange. 
func (c *connection) openFile(ctx context.Context, uri protocol.DocumentURI) (*cmdFile, error) { file := c.client.openFile(uri) if file.err != nil { diff --git a/gopls/internal/cmd/codelens.go b/gopls/internal/cmd/codelens.go index 75db4d04843..074733e58f5 100644 --- a/gopls/internal/cmd/codelens.go +++ b/gopls/internal/cmd/codelens.go @@ -44,8 +44,8 @@ Example: $ gopls codelens a_test.go # list code lenses in a file $ gopls codelens a_test.go:10 # list code lenses on line 10 - $ gopls codelens a_test.go gopls.test # list gopls.test commands - $ gopls codelens -exec a_test.go:10 gopls.test # run a specific test + $ gopls codelens a_test.go "run test" # list gopls.run_tests commands + $ gopls codelens -exec a_test.go:10 "run test" # run a specific test codelens-flags: `) diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index d819279d699..42812a870a4 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -213,8 +213,8 @@ func TestFail(t *testing.T) { t.Fatal("fail") } { res := gopls(t, tree, "codelens", "./a/a_test.go") res.checkExit(true) - res.checkStdout(`a_test.go:3: "run test" \[gopls.test\]`) - res.checkStdout(`a_test.go:4: "run test" \[gopls.test\]`) + res.checkStdout(`a_test.go:3: "run test" \[gopls.run_tests\]`) + res.checkStdout(`a_test.go:4: "run test" \[gopls.run_tests\]`) } // no codelens with title/position { @@ -950,12 +950,12 @@ func TestCodeAction(t *testing.T) { module example.com go 1.18 --- a.go -- +-- a/a.go -- package a type T int func f() (int, string) { return } --- b.go -- +-- a/b.go -- package a import "io" var _ io.Reader = C{} @@ -970,14 +970,14 @@ type C struct{} } // list code actions in file { - res := gopls(t, tree, "codeaction", "a.go") + res := gopls(t, tree, "codeaction", "a/a.go") res.checkExit(true) res.checkStdout(`edit "Fill in return values" \[quickfix\]`) res.checkStdout(`command "Browse documentation for package a" \[source.doc\]`) } // list code actions in file, filtering by title { - res := gopls(t, tree, "codeaction", "-title=Browse.*doc", "a.go") + res := gopls(t, tree, "codeaction", "-title=Browse.*doc", "a/a.go") res.checkExit(true) got := res.stdout want := `command "Browse gopls feature documentation" [gopls.doc.features]` + @@ -990,12 +990,12 @@ type C struct{} } // list code actions in file, filtering (hierarchically) by kind { - res := gopls(t, tree, "codeaction", "-kind=source", "a.go") + res := gopls(t, tree, "codeaction", "-kind=source", "a/a.go") res.checkExit(true) got := res.stdout want := `command "Browse documentation for package a" [source.doc]` + "\n" + - `command "Toggle compiler optimization details" [source.toggleCompilerOptDetails]` + + `command "Show compiler optimization details for \"a\"" [source.toggleCompilerOptDetails]` + "\n" if got != want { t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) @@ -1003,13 +1003,13 @@ type C struct{} } // list code actions at position (of io.Reader) { - res := gopls(t, tree, "codeaction", "b.go:#31") + res := gopls(t, tree, "codeaction", "a/b.go:#31") res.checkExit(true) res.checkStdout(`command "Browse documentation for type io.Reader" \[source.doc]`) } // list quick fixes at position (of type T) { - res := gopls(t, tree, "codeaction", "-kind=quickfix", "a.go:#15") + res := gopls(t, tree, "codeaction", "-kind=quickfix", "a/a.go:#15") res.checkExit(true) got := res.stdout want := `edit "Fill in return values" [quickfix]` + "\n" @@ -1019,7 +1019,7 @@ type C struct{} } 
// success, with explicit CodeAction kind and diagnostics span. { - res := gopls(t, tree, "codeaction", "-kind=quickfix", "-exec", "b.go:#40") + res := gopls(t, tree, "codeaction", "-kind=quickfix", "-exec", "a/b.go:#40") res.checkExit(true) got := res.stdout want := ` diff --git a/gopls/internal/cmd/semantictokens.go b/gopls/internal/cmd/semantictokens.go index 77e8a03939c..8d3dff68e2b 100644 --- a/gopls/internal/cmd/semantictokens.go +++ b/gopls/internal/cmd/semantictokens.go @@ -14,6 +14,7 @@ import ( "unicode/utf8" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/semtok" "golang.org/x/tools/gopls/internal/settings" ) @@ -40,15 +41,15 @@ import ( // 0-based: lines and character positions are 1 less than in // the gopls coordinate system -type semtok struct { +type semanticToken struct { app *Application } -func (c *semtok) Name() string { return "semtok" } -func (c *semtok) Parent() string { return c.app.Name() } -func (c *semtok) Usage() string { return "" } -func (c *semtok) ShortHelp() string { return "show semantic tokens for the specified file" } -func (c *semtok) DetailedHelp(f *flag.FlagSet) { +func (c *semanticToken) Name() string { return "semtok" } +func (c *semanticToken) Parent() string { return c.app.Name() } +func (c *semanticToken) Usage() string { return "" } +func (c *semanticToken) ShortHelp() string { return "show semantic tokens for the specified file" } +func (c *semanticToken) DetailedHelp(f *flag.FlagSet) { fmt.Fprint(f.Output(), ` Example: show the semantic tokens for this file: @@ -59,7 +60,7 @@ Example: show the semantic tokens for this file: // Run performs the semtok on the files specified by args and prints the // results to stdout in the format described above. -func (c *semtok) Run(ctx context.Context, args ...string) error { +func (c *semanticToken) Run(ctx context.Context, args ...string) error { if len(args) != 1 { return fmt.Errorf("expected one file name, got %d", len(args)) } @@ -97,14 +98,16 @@ func (c *semtok) Run(ctx context.Context, args ...string) error { if err != nil { return err } - return decorate(file, resp.Data) + return decorate(conn.initializeResult.Capabilities.SemanticTokensProvider.(protocol.SemanticTokensOptions).Legend, file, resp.Data) } +// mark provides a human-readable representation of protocol.SemanticTokens. +// It translates token types and modifiers to strings instead of uint32 values. type mark struct { line, offset int // 1-based, from RangeSpan len int // bytes, not runes - typ string - mods []string + typ semtok.Type + mods []semtok.Modifier } // prefixes for semantic token comments @@ -136,8 +139,10 @@ func markLine(m mark, lines [][]byte) { lines[m.line-1] = l } -func decorate(file *cmdFile, result []uint32) error { - marks := newMarks(file, result) +// decorate translates semantic token data (protocol.SemanticTokens) from its +// raw []uint32 format into a human-readable representation and prints it to stdout. 
+func decorate(legend protocol.SemanticTokensLegend, file *cmdFile, data []uint32) error { + marks := newMarks(legend, file, data) if len(marks) == 0 { return nil } @@ -150,25 +155,25 @@ func decorate(file *cmdFile, result []uint32) error { return nil } -func newMarks(file *cmdFile, d []uint32) []mark { +func newMarks(legend protocol.SemanticTokensLegend, file *cmdFile, data []uint32) []mark { ans := []mark{} // the following two loops could be merged, at the cost // of making the logic slightly more complicated to understand // first, convert from deltas to absolute, in LSP coordinates - lspLine := make([]uint32, len(d)/5) - lspChar := make([]uint32, len(d)/5) + lspLine := make([]uint32, len(data)/5) + lspChar := make([]uint32, len(data)/5) var line, char uint32 - for i := 0; 5*i < len(d); i++ { - lspLine[i] = line + d[5*i+0] - if d[5*i+0] > 0 { + for i := 0; 5*i < len(data); i++ { + lspLine[i] = line + data[5*i+0] + if data[5*i+0] > 0 { char = 0 } - lspChar[i] = char + d[5*i+1] + lspChar[i] = char + data[5*i+1] char = lspChar[i] line = lspLine[i] } // second, convert to gopls coordinates - for i := 0; 5*i < len(d); i++ { + for i := 0; 5*i < len(data); i++ { pr := protocol.Range{ Start: protocol.Position{ Line: lspLine[i], @@ -176,19 +181,30 @@ func newMarks(file *cmdFile, d []uint32) []mark { }, End: protocol.Position{ Line: lspLine[i], - Character: lspChar[i] + d[5*i+2], + Character: lspChar[i] + data[5*i+2], }, } spn, err := file.rangeSpan(pr) if err != nil { log.Fatal(err) } + + var mods []semtok.Modifier + { + n := int(data[5*i+4]) + for i, mod := range legend.TokenModifiers { + if (n & (1 << i)) != 0 { + mods = append(mods, semtok.Modifier(mod)) + } + } + } + m := mark{ line: spn.Start().Line(), offset: spn.Start().Column(), len: spn.End().Column() - spn.Start().Column(), - typ: protocol.SemType(int(d[5*i+3])), - mods: protocol.SemMods(int(d[5*i+4])), + typ: semtok.Type(legend.TokenTypes[data[5*i+3]]), + mods: mods, } ans = append(ans, m) } diff --git a/gopls/internal/cmd/span.go b/gopls/internal/cmd/span.go index 4753d534350..44a3223c235 100644 --- a/gopls/internal/cmd/span.go +++ b/gopls/internal/cmd/span.go @@ -185,8 +185,7 @@ func (p *_point) clean() { // The format produced is one that can be read back in using parseSpan. // // TODO(adonovan): this is esoteric, and the formatting options are -// never used outside of TestFormat. Replace with something simpler -// along the lines of MappedRange.String. +// never used outside of TestFormat. 
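// Editor's note (self-contained sketch, not part of the patch): the wire
// format decoded by newMarks above. LSP semantic tokens arrive as a []uint32
// in groups of five — deltaLine, deltaStartChar, length, typeIndex,
// modifierBitset — and the server's legend maps the indices back to names.
// The legend and data below are made-up sample values.
package main

import "fmt"

func main() {
	tokenTypes := []string{"namespace", "function", "variable"}
	tokenModifiers := []string{"declaration", "readonly"}

	data := []uint32{
		0, 0, 3, 1, 0b01, // same line, char 0, len 3: "function" {declaration}
		2, 4, 5, 2, 0b10, // +2 lines, char 4, len 5: "variable" {readonly}
	}

	var line, char uint32
	for i := 0; i < len(data); i += 5 {
		line += data[i]
		if data[i] > 0 {
			char = 0 // moving to a new line resets the character base
		}
		char += data[i+1]

		var mods []string
		for bit, name := range tokenModifiers {
			if data[i+4]&(1<<bit) != 0 {
				mods = append(mods, name)
			}
		}
		fmt.Printf("line %d, char %d, len %d: %s %v\n",
			line, char, data[i+2], tokenTypes[data[i+3]], mods)
	}
}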
func (s span) Format(f fmt.State, c rune) { fullForm := f.Flag('+') preferOffset := f.Flag('#') diff --git a/gopls/internal/cmd/usage/codelens.hlp b/gopls/internal/cmd/usage/codelens.hlp index 59afe0d3a27..f72bb465e07 100644 --- a/gopls/internal/cmd/usage/codelens.hlp +++ b/gopls/internal/cmd/usage/codelens.hlp @@ -19,8 +19,8 @@ Example: $ gopls codelens a_test.go # list code lenses in a file $ gopls codelens a_test.go:10 # list code lenses on line 10 - $ gopls codelens a_test.go gopls.test # list gopls.test commands - $ gopls codelens -exec a_test.go:10 gopls.test # run a specific test + $ gopls codelens a_test.go "run test" # list gopls.run_tests commands + $ gopls codelens -exec a_test.go:10 "run test" # run a specific test codelens-flags: -d,-diff diff --git a/gopls/internal/doc/api.go b/gopls/internal/doc/api.go index a096f5ad63e..258f90d49ae 100644 --- a/gopls/internal/doc/api.go +++ b/gopls/internal/doc/api.go @@ -27,14 +27,15 @@ type API struct { } type Option struct { - Name string - Type string // T = bool | string | int | enum | any | []T | map[T]T | time.Duration - Doc string - EnumKeys EnumKeys - EnumValues []EnumValue - Default string - Status string - Hierarchy string + Name string + Type string // T = bool | string | int | enum | any | []T | map[T]T | time.Duration + Doc string + EnumKeys EnumKeys + EnumValues []EnumValue + Default string + Status string + Hierarchy string + DeprecationMessage string } type EnumKeys struct { diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 982ec34909b..8f101079a9c 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -12,7 +12,8 @@ "EnumValues": null, "Default": "[]", "Status": "", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "env", @@ -25,7 +26,8 @@ "EnumValues": null, "Default": "{}", "Status": "", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "directoryFilters", @@ -38,7 +40,8 @@ "EnumValues": null, "Default": "[\"-**/node_modules\"]", "Status": "", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "templateExtensions", @@ -51,7 +54,8 @@ "EnumValues": null, "Default": "[]", "Status": "", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "memoryMode", @@ -64,7 +68,8 @@ "EnumValues": null, "Default": "\"\"", "Status": "experimental", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "expandWorkspaceToModule", @@ -77,7 +82,8 @@ "EnumValues": null, "Default": "true", "Status": "experimental", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "standaloneTags", @@ -90,7 +96,22 @@ "EnumValues": null, "Default": "[\"ignore\"]", "Status": "", - "Hierarchy": "build" + "Hierarchy": "build", + "DeprecationMessage": "" + }, + { + "Name": "workspaceFiles", + "Type": "[]string", + "Doc": "workspaceFiles configures the set of globs that match files defining the\nlogical build of the current workspace. 
Any on-disk changes to any files\nmatching a glob specified here will trigger a reload of the workspace.\n\nThis setting need only be customized in environments with a custom\nGOPACKAGESDRIVER.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "[]", + "Status": "", + "Hierarchy": "build", + "DeprecationMessage": "" }, { "Name": "hoverKind", @@ -120,7 +141,8 @@ ], "Default": "\"FullDocumentation\"", "Status": "", - "Hierarchy": "ui.documentation" + "Hierarchy": "ui.documentation", + "DeprecationMessage": "" }, { "Name": "linkTarget", @@ -133,7 +155,8 @@ "EnumValues": null, "Default": "\"pkg.go.dev\"", "Status": "", - "Hierarchy": "ui.documentation" + "Hierarchy": "ui.documentation", + "DeprecationMessage": "" }, { "Name": "linksInHover", @@ -159,7 +182,8 @@ ], "Default": "true", "Status": "", - "Hierarchy": "ui.documentation" + "Hierarchy": "ui.documentation", + "DeprecationMessage": "" }, { "Name": "usePlaceholders", @@ -172,7 +196,8 @@ "EnumValues": null, "Default": "false", "Status": "", - "Hierarchy": "ui.completion" + "Hierarchy": "ui.completion", + "DeprecationMessage": "" }, { "Name": "completionBudget", @@ -185,7 +210,8 @@ "EnumValues": null, "Default": "\"100ms\"", "Status": "debug", - "Hierarchy": "ui.completion" + "Hierarchy": "ui.completion", + "DeprecationMessage": "" }, { "Name": "matcher", @@ -211,7 +237,8 @@ ], "Default": "\"Fuzzy\"", "Status": "advanced", - "Hierarchy": "ui.completion" + "Hierarchy": "ui.completion", + "DeprecationMessage": "" }, { "Name": "experimentalPostfixCompletions", @@ -224,7 +251,8 @@ "EnumValues": null, "Default": "true", "Status": "experimental", - "Hierarchy": "ui.completion" + "Hierarchy": "ui.completion", + "DeprecationMessage": "" }, { "Name": "completeFunctionCalls", @@ -237,7 +265,8 @@ "EnumValues": null, "Default": "true", "Status": "", - "Hierarchy": "ui.completion" + "Hierarchy": "ui.completion", + "DeprecationMessage": "" }, { "Name": "importShortcut", @@ -263,7 +292,8 @@ ], "Default": "\"Both\"", "Status": "", - "Hierarchy": "ui.navigation" + "Hierarchy": "ui.navigation", + "DeprecationMessage": "" }, { "Name": "symbolMatcher", @@ -293,7 +323,8 @@ ], "Default": "\"FastFuzzy\"", "Status": "advanced", - "Hierarchy": "ui.navigation" + "Hierarchy": "ui.navigation", + "DeprecationMessage": "" }, { "Name": "symbolStyle", @@ -319,7 +350,8 @@ ], "Default": "\"Dynamic\"", "Status": "advanced", - "Hierarchy": "ui.navigation" + "Hierarchy": "ui.navigation", + "DeprecationMessage": "" }, { "Name": "symbolScope", @@ -341,7 +373,8 @@ ], "Default": "\"all\"", "Status": "", - "Hierarchy": "ui.navigation" + "Hierarchy": "ui.navigation", + "DeprecationMessage": "" }, { "Name": "analyses", @@ -440,6 +473,16 @@ "Doc": "report assembly that clobbers the frame pointer before saving it", "Default": "true" }, + { + "Name": "\"gofix\"", + "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", + "Default": "true" + }, + { + "Name": "\"hostport\"", + "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", 
addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", + "Default": "true" + }, { "Name": "\"httpresponse\"", "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", @@ -467,7 +510,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) 
by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;", "Default": "true" }, { @@ -577,7 +620,7 @@ }, { "Name": "\"unreachable\"", - "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by an return statement, a call to panic, an\ninfinite loop, or similar constructs.", + "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", "Default": "true" }, { @@ -592,7 +635,7 @@ }, { "Name": "\"unusedparams\"", - "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.", + "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", "Default": "true" }, { @@ -603,7 +646,7 @@ { "Name": "\"unusedvariable\"", "Doc": "check for unused variables and suggest fixes", - "Default": "false" + "Default": "true" }, { "Name": "\"unusedwrite\"", @@ -625,7 +668,8 @@ "EnumValues": null, 
"Default": "{}", "Status": "", - "Hierarchy": "ui.diagnostic" + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" }, { "Name": "staticcheck", @@ -638,7 +682,8 @@ "EnumValues": null, "Default": "false", "Status": "experimental", - "Hierarchy": "ui.diagnostic" + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" }, { "Name": "vulncheck", @@ -660,7 +705,8 @@ ], "Default": "\"Off\"", "Status": "experimental", - "Hierarchy": "ui.diagnostic" + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" }, { "Name": "diagnosticsDelay", @@ -673,7 +719,8 @@ "EnumValues": null, "Default": "\"1s\"", "Status": "advanced", - "Hierarchy": "ui.diagnostic" + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" }, { "Name": "diagnosticsTrigger", @@ -695,7 +742,8 @@ ], "Default": "\"Edit\"", "Status": "experimental", - "Hierarchy": "ui.diagnostic" + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" }, { "Name": "analysisProgressReporting", @@ -708,7 +756,8 @@ "EnumValues": null, "Default": "true", "Status": "", - "Hierarchy": "ui.diagnostic" + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" }, { "Name": "hints", @@ -757,7 +806,8 @@ "EnumValues": null, "Default": "{}", "Status": "experimental", - "Hierarchy": "ui.inlayhint" + "Hierarchy": "ui.inlayhint", + "DeprecationMessage": "" }, { "Name": "codelenses", @@ -811,7 +861,8 @@ "EnumValues": null, "Default": "{\"generate\":true,\"regenerate_cgo\":true,\"run_govulncheck\":false,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}", "Status": "", - "Hierarchy": "ui" + "Hierarchy": "ui", + "DeprecationMessage": "" }, { "Name": "semanticTokens", @@ -824,12 +875,13 @@ "EnumValues": null, "Default": "false", "Status": "experimental", - "Hierarchy": "ui" + "Hierarchy": "ui", + "DeprecationMessage": "" }, { "Name": "noSemanticString", "Type": "bool", - "Doc": "noSemanticString turns off the sending of the semantic token 'string'\n", + "Doc": "noSemanticString turns off the sending of the semantic token 'string'\n\nDeprecated: Use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n", "EnumKeys": { "ValueType": "", "Keys": null @@ -837,12 +889,13 @@ "EnumValues": null, "Default": "false", "Status": "experimental", - "Hierarchy": "ui" + "Hierarchy": "ui", + "DeprecationMessage": "use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n" }, { "Name": "noSemanticNumber", "Type": "bool", - "Doc": "noSemanticNumber turns off the sending of the semantic token 'number'\n", + "Doc": "noSemanticNumber turns off the sending of the semantic token 'number'\n\nDeprecated: Use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n", "EnumKeys": { "ValueType": "", "Keys": null @@ -850,7 +903,36 @@ "EnumValues": null, "Default": "false", "Status": "experimental", - "Hierarchy": "ui" + "Hierarchy": "ui", + "DeprecationMessage": "use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n" + }, + { + "Name": "semanticTokenTypes", + "Type": "map[string]bool", + "Doc": "semanticTokenTypes configures the semantic token types. It allows\ndisabling types by setting each value to false.\nBy default, all types are enabled.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "{}", + "Status": "experimental", + "Hierarchy": "ui", + "DeprecationMessage": "" + }, + { + "Name": "semanticTokenModifiers", + "Type": "map[string]bool", + "Doc": "semanticTokenModifiers configures the semantic token modifiers. 
It allows\ndisabling modifiers by setting each value to false.\nBy default, all modifiers are enabled.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "{}", + "Status": "experimental", + "Hierarchy": "ui", + "DeprecationMessage": "" }, { "Name": "local", @@ -863,7 +945,8 @@ "EnumValues": null, "Default": "\"\"", "Status": "", - "Hierarchy": "formatting" + "Hierarchy": "formatting", + "DeprecationMessage": "" }, { "Name": "gofumpt", @@ -876,7 +959,8 @@ "EnumValues": null, "Default": "false", "Status": "", - "Hierarchy": "formatting" + "Hierarchy": "formatting", + "DeprecationMessage": "" }, { "Name": "verboseOutput", @@ -889,7 +973,8 @@ "EnumValues": null, "Default": "false", "Status": "debug", - "Hierarchy": "" + "Hierarchy": "", + "DeprecationMessage": "" } ] }, @@ -1060,6 +1145,18 @@ "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer", "Default": true }, + { + "Name": "gofix", + "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", + "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + "Default": true + }, + { + "Name": "hostport", + "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", + "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport", + "Default": true + }, { "Name": "httpresponse", "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", @@ -1092,7 +1189,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) 
by fmt.Appendf(nil, ...),\n added in go1.19;", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, @@ -1224,7 +1321,7 @@ }, { "Name": "unreachable", - "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by an return statement, a call to panic, an\ninfinite loop, or similar constructs.", + "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable", "Default": true }, @@ -1242,7 +1339,7 @@ }, { "Name": "unusedparams", - "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.", + "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being 
called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedparams", "Default": true }, @@ -1256,7 +1353,7 @@ "Name": "unusedvariable", "Doc": "check for unused variables and suggest fixes", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedvariable", - "Default": false + "Default": true }, { "Name": "unusedwrite", diff --git a/gopls/internal/golang/assembly.go b/gopls/internal/golang/assembly.go index 7f0ace4daf6..3b778a54697 100644 --- a/gopls/internal/golang/assembly.go +++ b/gopls/internal/golang/assembly.go @@ -21,6 +21,7 @@ import ( "strings" "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/util/morestrings" ) // AssemblyHTML returns an HTML document containing an assembly listing of the selected function. @@ -103,7 +104,7 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Pack // Skip filenames of the form "". if parts := insnRx.FindStringSubmatch(line); parts != nil { link := " " // if unknown - if file, linenum, ok := cutLast(parts[2], ":"); ok && !strings.HasPrefix(file, "<") { + if file, linenum, ok := morestrings.CutLast(parts[2], ":"); ok && !strings.HasPrefix(file, "<") { if linenum, err := strconv.Atoi(linenum); err == nil { text := fmt.Sprintf("L%04d", linenum) link = sourceLink(text, web.SrcURL(file, linenum, 1)) @@ -117,11 +118,3 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Pack } return buf.Bytes(), nil } - -// cutLast is the "last" analogue of [strings.Cut]. 
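// Illustrative example: a minimal function the unusedparams analyzer
// described above would report. The names are made up for illustration.
package example

import "fmt"

// The prefix parameter is never used: the analyzer suggests renaming it to
// "_", and the "Refactor: remove unused parameter" code action can remove
// it entirely, along with the matching arguments at call sites.
func greet(prefix, name string) {
	fmt.Println("hello, " + name)
}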
-func cutLast(s, sep string) (before, after string, ok bool) { - if i := strings.LastIndex(s, sep); i >= 0 { - return s[:i], s[i+len(sep):], true - } - return s, "", false -} diff --git a/gopls/internal/golang/code_lens.go b/gopls/internal/golang/code_lens.go index 1359d0d0148..b04724e0cbc 100644 --- a/gopls/internal/golang/code_lens.go +++ b/gopls/internal/golang/code_lens.go @@ -47,13 +47,19 @@ func runTestCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand } puri := fh.URI() for _, fn := range testFuncs { - cmd := command.NewTestCommand("run test", puri, []string{fn.name}, nil) + cmd := command.NewRunTestsCommand("run test", command.RunTestsArgs{ + URI: puri, + Tests: []string{fn.name}, + }) rng := protocol.Range{Start: fn.rng.Start, End: fn.rng.Start} codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd}) } for _, fn := range benchFuncs { - cmd := command.NewTestCommand("run benchmark", puri, nil, []string{fn.name}) + cmd := command.NewRunTestsCommand("run benchmark", command.RunTestsArgs{ + URI: puri, + Benchmarks: []string{fn.name}, + }) rng := protocol.Range{Start: fn.rng.Start, End: fn.rng.Start} codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd}) } @@ -72,7 +78,10 @@ func runTestCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand for _, fn := range benchFuncs { benches = append(benches, fn.name) } - cmd := command.NewTestCommand("run file benchmarks", puri, nil, benches) + cmd := command.NewRunTestsCommand("run file benchmarks", command.RunTestsArgs{ + URI: puri, + Benchmarks: benches, + }) codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd}) } return codeLens, nil diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 627ba1a60d6..34ac7426019 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -11,6 +11,7 @@ import ( "go/ast" "go/token" "go/types" + "path/filepath" "reflect" "slices" "sort" @@ -105,6 +106,8 @@ func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, req.pkg = nil } if err := p.fn(ctx, req); err != nil { + // TODO(adonovan): most errors in code action providers should + // not block other providers; see https://go.dev/issue/71275. return nil, err } } @@ -151,7 +154,14 @@ func (req *codeActionsRequest) addApplyFixAction(title, fix string, loc protocol // then the command is embedded into the code action data field so // that the client can later ask the server to "resolve" a command // into an edit that they can preview and apply selectively. -// Set allowResolveEdits only for actions that generate edits. +// IMPORTANT: set allowResolveEdits only for actions that are 'edit aware', +// meaning they can detect when they are being executed in the context of a +// codeAction/resolve request, and return edits rather than applying them using +// workspace/applyEdit. In golang/go#71405, edits were being apply during the +// codeAction/resolve request handler. +// TODO(rfindley): refactor the command and code lens registration APIs so that +// resolve edit support is inferred from the command signature, not dependent +// on coordination between codeAction and command logic. // // Otherwise, the command is set as the code action operation. 
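// Illustrative example: the cutLast helper removed here now lives in
// gopls/internal/util/morestrings as CutLast (see the assembly.go hunk
// above). A standalone sketch of the same behavior, for instance when
// splitting a "file:line" string whose path may itself contain a ':':
package main

import (
	"fmt"
	"strings"
)

func cutLast(s, sep string) (before, after string, ok bool) {
	if i := strings.LastIndex(s, sep); i >= 0 {
		return s[:i], s[i+len(sep):], true
	}
	return s, "", false
}

func main() {
	file, line, _ := cutLast(`C:\work\main.go:42`, ":")
	fmt.Println(file, line) // "C:\work\main.go 42"; strings.Cut would have split at "C:"
}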
func (req *codeActionsRequest) addCommandAction(cmd *protocol.Command, allowResolveEdits bool) { @@ -335,9 +345,9 @@ func quickFix(ctx context.Context, req *codeActionsRequest) error { req.addApplyFixAction(msg, fixMissingCalledFunction, req.loc) } - // "undeclared name: x" or "undefined: x" compiler error. - // Offer a "Create variable/function x" code action. - // See [fixUndeclared] for command implementation. + // "undeclared name: X" or "undefined: X" compiler error. + // Offer a "Create variable/function X" code action. + // See [createUndeclared] for command implementation. case strings.HasPrefix(msg, "undeclared name: "), strings.HasPrefix(msg, "undefined: "): path, _ := astutil.PathEnclosingInterval(req.pgf.File, start, end) @@ -525,7 +535,7 @@ func refactorExtractVariableAll(ctx context.Context, req *codeActionsRequest) er func refactorExtractToNewFile(ctx context.Context, req *codeActionsRequest) error { if canExtractToNewFile(req.pgf, req.start, req.end) { cmd := command.NewExtractToNewFileCommand("Extract declarations to new file", req.loc) - req.addCommandAction(cmd, true) + req.addCommandAction(cmd, false) } return nil } @@ -559,7 +569,7 @@ func addTest(ctx context.Context, req *codeActionsRequest) error { } cmd := command.NewAddTestCommand("Add test for "+decl.Name.String(), req.loc) - req.addCommandAction(cmd, true) + req.addCommandAction(cmd, false) // TODO(hxjiang): add code action for generate test for package/file. return nil @@ -803,7 +813,11 @@ func goTest(ctx context.Context, req *codeActionsRequest) error { return nil } - cmd := command.NewTestCommand("Run tests and benchmarks", req.loc.URI, tests, benchmarks) + cmd := command.NewRunTestsCommand("Run tests and benchmarks", command.RunTestsArgs{ + URI: req.loc.URI, + Tests: tests, + Benchmarks: benchmarks, + }) req.addCommandAction(cmd, false) return nil } @@ -875,10 +889,22 @@ func goAssembly(ctx context.Context, req *codeActionsRequest) error { return nil } -// toggleCompilerOptDetails produces "Toggle compiler optimization details" code action. -// See [server.commandHandler.ToggleCompilerOptDetails] for command implementation. +// toggleCompilerOptDetails produces "{Show,Hide} compiler optimization details" code action. +// See [server.commandHandler.GCDetails] for command implementation. func toggleCompilerOptDetails(ctx context.Context, req *codeActionsRequest) error { - cmd := command.NewGCDetailsCommand("Toggle compiler optimization details", req.fh.URI()) - req.addCommandAction(cmd, false) + // TODO(adonovan): errors from code action providers should probably be + // logged, even if they aren't visible to the client; see https://go.dev/issue/71275. 
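// Illustrative example: the "undefined: X" quick fix mentioned above.
// Starting from a body that uses an undeclared name limit, the compiler
// reports "undefined: limit" and gopls offers a "Create variable limit"
// code action (see createUndeclared); accepting it inserts a declaration
// roughly like the one below. The exact placeholder value may differ.
package example

func clamp(v int) int {
	limit := 0 // declaration added by the "Create variable limit" fix (illustrative)
	if v > limit {
		return limit
	}
	return v
}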
+ if meta, err := NarrowestMetadataForFile(ctx, req.snapshot, req.fh.URI()); err == nil { + if len(meta.CompiledGoFiles) == 0 { + return fmt.Errorf("package %q does not compile file %q", meta.ID, req.fh.URI()) + } + dir := meta.CompiledGoFiles[0].Dir() + + title := fmt.Sprintf("%s compiler optimization details for %q", + cond(req.snapshot.WantCompilerOptDetails(dir), "Hide", "Show"), + filepath.Base(dir.Path())) + cmd := command.NewGCDetailsCommand(title, req.fh.URI()) + req.addCommandAction(cmd, false) + } return nil } diff --git a/gopls/internal/golang/compileropt.go b/gopls/internal/golang/compileropt.go index 2a39a5b5ee1..f9f046463f6 100644 --- a/gopls/internal/golang/compileropt.go +++ b/gopls/internal/golang/compileropt.go @@ -11,22 +11,19 @@ import ( "fmt" "os" "path/filepath" + "runtime" "strings" "golang.org/x/tools/gopls/internal/cache" - "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/event" ) // CompilerOptDetails invokes the Go compiler with the "-json=0,dir" -// flag on the specified package, parses its log of optimization -// decisions, and returns them as a set of diagnostics. -func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - if len(mp.CompiledGoFiles) == 0 { - return nil, nil - } - pkgDir := mp.CompiledGoFiles[0].DirPath() +// flag on the packages and tests in the specified directory, parses +// its log of optimization decisions, and returns them as a set of +// diagnostics. +func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, pkgDir protocol.DocumentURI) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { outDir, err := os.MkdirTemp("", fmt.Sprintf("gopls-%d.details", os.Getpid())) if err != nil { return nil, err @@ -37,22 +34,20 @@ func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, mp *metad } }() - tmpFile, err := os.CreateTemp(os.TempDir(), "gopls-x") - if err != nil { - return nil, err - } - tmpFile.Close() // ignore error - defer os.Remove(tmpFile.Name()) - outDirURI := protocol.URIFromPath(outDir) // details doesn't handle Windows URIs in the form of "file:///C:/...", // so rewrite them to "file://C:/...". See golang/go#41614. if !strings.HasPrefix(outDir, "/") { outDirURI = protocol.DocumentURI(strings.Replace(string(outDirURI), "file:///", "file://", 1)) } - inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, pkgDir, "build", []string{ + + // We use "go test -c" not "go build" as it covers all three packages + // (p, "p [p.test]", "p_test [p.test]") in the directory, if they exist. + inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, pkgDir.Path(), "test", []string{ + "-c", + "-vet=off", // weirdly -c doesn't disable vet fmt.Sprintf("-gcflags=-json=0,%s", outDirURI), // JSON schema version 0 - fmt.Sprintf("-o=%s", tmpFile.Name()), + fmt.Sprintf("-o=%s", cond(runtime.GOOS == "windows", "NUL", "/dev/null")), ".", }) if err != nil { @@ -79,7 +74,8 @@ func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, mp *metad if fh == nil { continue } - if pkgDir != fh.URI().DirPath() { + if pkgDir != fh.URI().Dir() { + // Filter compiler diagnostics to the requested directory. // https://github.com/golang/go/issues/42198 // sometimes the detail diagnostics generated for files // outside the package can never be taken back. 
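// Illustrative example: a rough standalone equivalent of the invocation
// built above, showing what the optimization-details feature now runs in
// the requested directory. "go test -c" is used so that p, "p [p.test]",
// and "p_test [p.test]" are all compiled; gopls passes the output
// directory as a file:// URI, which is simplified to a plain path here.
package main

import (
	"fmt"
	"os"
	"os/exec"
	"runtime"
)

func main() {
	outDir, err := os.MkdirTemp("", "optdetails") // compiler writes one JSON log per source file here
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(outDir)

	discard := "/dev/null"
	if runtime.GOOS == "windows" {
		discard = "NUL"
	}
	cmd := exec.Command("go", "test", "-c", "-vet=off",
		fmt.Sprintf("-gcflags=-json=0,%s", outDir),
		"-o="+discard,
		".")
	cmd.Dir = "." // the package directory under inspection
	out, err := cmd.CombinedOutput()
	fmt.Printf("%s(err: %v)\n", out, err)
}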
diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index f438a220000..4c340055233 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -31,6 +31,7 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/fuzzy" "golang.org/x/tools/gopls/internal/golang" @@ -218,17 +219,16 @@ type completer struct { // filename is the name of the file associated with this completion request. filename string - // file is the AST of the file associated with this completion request. - file *ast.File + // pgf is the AST of the file associated with this completion request. + pgf *parsego.File // debugging // goversion is the version of Go in force in the file, as // defined by x/tools/internal/versions. Empty if unknown. // Since go1.22 it should always be known. goversion string - // (tokFile, pos) is the position at which the request was triggered. - tokFile *token.File - pos token.Pos + // pos is the position at which the request was triggered. + pos token.Pos // path is the path of AST nodes enclosing the position. path []ast.Node @@ -410,7 +410,7 @@ func (c *completer) setSurrounding(ident *ast.Ident) { content: ident.Name, cursor: c.pos, // Overwrite the prefix only. - tokFile: c.tokFile, + tokFile: c.pgf.Tok, start: ident.Pos(), end: ident.End(), mapper: c.mapper, @@ -435,7 +435,7 @@ func (c *completer) getSurrounding() *Selection { c.surrounding = &Selection{ content: "", cursor: c.pos, - tokFile: c.tokFile, + tokFile: c.pgf.Tok, start: c.pos, end: c.pos, mapper: c.mapper, @@ -609,8 +609,7 @@ func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p }, fh: fh, filename: fh.URI().Path(), - tokFile: pgf.Tok, - file: pgf.File, + pgf: pgf, goversion: goversion, path: path, pos: pos, @@ -711,7 +710,7 @@ func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p // search queue or completion items directly for different completion contexts. func (c *completer) collectCompletions(ctx context.Context) error { // Inside import blocks, return completions for unimported packages. - for _, importSpec := range c.file.Imports { + for _, importSpec := range c.pgf.File.Imports { if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) { continue } @@ -719,7 +718,7 @@ func (c *completer) collectCompletions(ctx context.Context) error { } // Inside comments, offer completions for the name of the relevant symbol. - for _, comment := range c.file.Comments { + for _, comment := range c.pgf.File.Comments { if comment.Pos() < c.pos && c.pos <= comment.End() { c.populateCommentCompletions(comment) return nil @@ -749,7 +748,7 @@ func (c *completer) collectCompletions(ctx context.Context) error { switch n := c.path[0].(type) { case *ast.Ident: - if c.file.Name == n { + if c.pgf.File.Name == n { return c.packageNameCompletions(ctx, c.fh.URI(), n) } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n { // Is this the Sel part of a selector? 
@@ -921,14 +920,14 @@ func (c *completer) populateImportCompletions(searchImport *ast.ImportSpec) erro c.surrounding = &Selection{ content: content, cursor: c.pos, - tokFile: c.tokFile, + tokFile: c.pgf.Tok, start: start, end: end, mapper: c.mapper, } seenImports := make(map[string]struct{}) - for _, importSpec := range c.file.Imports { + for _, importSpec := range c.pgf.File.Imports { if importSpec.Path.Value == importPath { continue } @@ -1024,7 +1023,7 @@ func (c *completer) populateCommentCompletions(comment *ast.CommentGroup) { c.setSurroundingForComment(comment) // Using the next line pos, grab and parse the exported symbol on that line - for _, n := range c.file.Decls { + for _, n := range c.pgf.File.Decls { declLine := safetoken.Line(file, n.Pos()) // if the comment is not in, directly above or on the same line as a declaration if declLine != commentLine && declLine != commentLine+1 && @@ -1080,8 +1079,33 @@ func (c *completer) populateCommentCompletions(comment *ast.CommentGroup) { // collect receiver struct fields if node.Recv != nil { - sig := c.pkg.TypesInfo().Defs[node.Name].(*types.Func).Signature() - _, named := typesinternal.ReceiverNamed(sig.Recv()) // may be nil if ill-typed + obj := c.pkg.TypesInfo().Defs[node.Name] + switch obj.(type) { + case nil: + report := func() { + bug.Reportf("missing def for func %s", node.Name) + } + // Debugging golang/go#71273. + if !slices.Contains(c.pkg.CompiledGoFiles(), c.pgf) { + if c.snapshot.View().Type() == cache.GoPackagesDriverView { + report() + } else { + report() + } + } else { + report() + } + continue + case *types.Func: + default: + bug.Reportf("unexpected func obj type %T for %s", obj, node.Name) + } + sig := obj.(*types.Func).Signature() + recv := sig.Recv() + if recv == nil { + continue // may be nil if ill-typed + } + _, named := typesinternal.ReceiverNamed(recv) if named != nil { if recvStruct, ok := named.Underlying().(*types.Struct); ok { for i := 0; i < recvStruct.NumFields(); i++ { @@ -1133,7 +1157,7 @@ func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) { c.surrounding = &Selection{ content: cursorComment.Text[start:end], cursor: c.pos, - tokFile: c.tokFile, + tokFile: c.pgf.Tok, start: token.Pos(int(cursorComment.Slash) + start), end: token.Pos(int(cursorComment.Slash) + end), mapper: c.mapper, @@ -1437,7 +1461,7 @@ func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { return nil } - goversion := c.pkg.TypesInfo().FileVersions[c.file] + goversion := c.pkg.TypesInfo().FileVersions[c.pgf.File] // Extract the package-level candidates using a quick parse. var g errgroup.Group @@ -1694,7 +1718,7 @@ func (c *completer) lexical(ctx context.Context) error { // Make sure the package name isn't already in use by another // object, and that this file doesn't import the package yet. // TODO(adonovan): what if pkg.Path has vendor/ prefix? 
- if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.Types() && !alreadyImports(c.file, golang.ImportPath(pkg.Path())) { + if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.Types() && !alreadyImports(c.pgf.File, golang.ImportPath(pkg.Path())) { seen[pkg.Name()] = struct{}{} obj := types.NewPkgName(0, nil, pkg.Name(), pkg) imp := &importInfo{ diff --git a/gopls/internal/golang/completion/format.go b/gopls/internal/golang/completion/format.go index f4fc7339b95..69339bffe84 100644 --- a/gopls/internal/golang/completion/format.go +++ b/gopls/internal/golang/completion/format.go @@ -18,9 +18,9 @@ import ( "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/gopls/internal/util/typesutil" + internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/imports" - "golang.org/x/tools/internal/typesinternal" ) var ( @@ -60,9 +60,12 @@ func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, e if obj.Type() == nil { detail = "" } - if isTypeName(obj) && c.wantTypeParams() { - // obj is a *types.TypeName, so its type must be Alias|Named. - tparams := typesinternal.TypeParams(obj.Type().(typesinternal.NamedOrAlias)) + + type hasTypeParams interface{ TypeParams() *types.TypeParamList } + if genericType, _ := obj.Type().(hasTypeParams); genericType != nil && isTypeName(obj) && c.wantTypeParams() { + // golang/go#71044: note that type names can be basic types, even in + // receiver position, for invalid code. + tparams := genericType.TypeParams() label += typesutil.FormatTypeParams(tparams) insert = label // maintain invariant above (label == insert) } @@ -259,10 +262,7 @@ Suffixes: } else { item.Documentation = doc.Synopsis(comment.Text()) } - // The desired pattern is `^// Deprecated`, but the prefix has been removed - // TODO(rfindley): It doesn't look like this does the right thing for - // multi-line comments. - if strings.HasPrefix(comment.Text(), "Deprecated") { + if internalastutil.Deprecation(comment) != "" { if c.snapshot.Options().CompletionTags { item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated} } else if c.snapshot.Options().CompletionDeprecated { diff --git a/gopls/internal/golang/completion/postfix_snippets.go b/gopls/internal/golang/completion/postfix_snippets.go index 4ffd14225fa..1bafe848490 100644 --- a/gopls/internal/golang/completion/postfix_snippets.go +++ b/gopls/internal/golang/completion/postfix_snippets.go @@ -653,7 +653,7 @@ func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, defaultName := imports.ImportPathToAssumedName(pkgPath) // Check if file already imports pkgPath. - for _, s := range c.file.Imports { + for _, s := range c.pgf.File.Imports { // TODO(adonovan): what if pkgPath has a vendor/ suffix? // This may be the cause of go.dev/issue/56291. if string(metadata.UnquoteImportPath(s)) == pkgPath { diff --git a/gopls/internal/golang/completion/util.go b/gopls/internal/golang/completion/util.go index 766484e2fc8..7a4729413ae 100644 --- a/gopls/internal/golang/completion/util.go +++ b/gopls/internal/golang/completion/util.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) // exprAtPos returns the index of the expression containing pos. 
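// Illustrative example: a doc comment that the Deprecation-based check
// above now recognizes. The deprecation notice is a later paragraph, which
// the old strings.HasPrefix(comment.Text(), "Deprecated") test did not
// handle. The declarations are made up for illustration.
package example

// Frob does the thing.
//
// Deprecated: use Frobnicate instead.
func Frob() {}

// Frobnicate replaces Frob.
func Frobnicate() {}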
@@ -126,7 +127,9 @@ func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info * // Construct a fake type for the object and return a fake object with this type. typename := golang.FormatNode(fset, resultExpr) typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil) - return types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ) + v := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ) + typesinternal.SetVarKind(v, typesinternal.PackageVar) + return v } // TODO(adonovan): inline these. @@ -284,7 +287,7 @@ func isBasicKind(t types.Type, k types.BasicInfo) bool { } func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) { - start, end, err := safetoken.Offsets(c.tokFile, from, to) + start, end, err := safetoken.Offsets(c.pgf.Tok, from, to) if err != nil { return nil, err // can't happen: from/to came from c } diff --git a/gopls/internal/golang/extracttofile.go b/gopls/internal/golang/extracttofile.go index cda9cd51e6d..39fb28e624b 100644 --- a/gopls/internal/golang/extracttofile.go +++ b/gopls/internal/golang/extracttofile.go @@ -95,7 +95,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han start, end, firstSymbol, ok := selectedToplevelDecls(pgf, start, end) if !ok { - return nil, bug.Errorf("invalid selection") + return nil, fmt.Errorf("invalid selection") } pgf.CheckPos(start) // #70553 // Inv: start is valid wrt pgf.Tok. diff --git a/gopls/internal/golang/fix.go b/gopls/internal/golang/fix.go index 7e83c1d6700..e812c677541 100644 --- a/gopls/internal/golang/fix.go +++ b/gopls/internal/golang/fix.go @@ -112,7 +112,7 @@ func ApplyFix(ctx context.Context, fix string, snapshot *cache.Snapshot, fh file fixInvertIfCondition: singleFile(invertIfCondition), fixSplitLines: singleFile(splitLines), fixJoinLines: singleFile(joinLines), - fixCreateUndeclared: singleFile(CreateUndeclared), + fixCreateUndeclared: singleFile(createUndeclared), fixMissingInterfaceMethods: stubMissingInterfaceMethodsFixer, fixMissingCalledFunction: stubMissingCalledFunctionFixer, } diff --git a/gopls/internal/golang/folding_range.go b/gopls/internal/golang/folding_range.go index c61802d1b58..9d80cc8de29 100644 --- a/gopls/internal/golang/folding_range.go +++ b/gopls/internal/golang/folding_range.go @@ -9,7 +9,7 @@ import ( "context" "go/ast" "go/token" - "sort" + "slices" "strings" "golang.org/x/tools/gopls/internal/cache" @@ -22,8 +22,8 @@ import ( // FoldingRangeInfo holds range and kind info of folding for an ast.Node type FoldingRangeInfo struct { - MappedRange protocol.MappedRange - Kind protocol.FoldingRangeKind + Range protocol.Range + Kind protocol.FoldingRangeKind } // FoldingRange gets all of the folding range for f. @@ -60,10 +60,8 @@ func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, // Walk the ast and collect folding ranges. 
ast.Inspect(pgf.File, visit) - sort.Slice(ranges, func(i, j int) bool { - irng := ranges[i].MappedRange.Range() - jrng := ranges[j].MappedRange.Range() - return protocol.CompareRange(irng, jrng) < 0 + slices.SortFunc(ranges, func(x, y *FoldingRangeInfo) int { + return protocol.CompareRange(x.Range, y.Range) }) return ranges, nil @@ -121,14 +119,14 @@ func foldingRangeFunc(pgf *parsego.File, n ast.Node, lineFoldingOnly bool) *Fold if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { return nil } - mrng, err := pgf.PosMappedRange(start, end) + rng, err := pgf.PosRange(start, end) if err != nil { - bug.Reportf("failed to create mapped range: %s", err) // can't happen + bug.Reportf("failed to create range: %s", err) // can't happen return nil } return &FoldingRangeInfo{ - MappedRange: mrng, - Kind: kind, + Range: rng, + Kind: kind, } } @@ -215,15 +213,15 @@ func commentsFoldingRange(pgf *parsego.File) (comments []*FoldingRangeInfo) { // folding range start at the end of the first line. endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0])) } - mrng, err := pgf.PosMappedRange(endLinePos, commentGrp.End()) + rng, err := pgf.PosRange(endLinePos, commentGrp.End()) if err != nil { bug.Reportf("failed to create mapped range: %s", err) // can't happen continue } comments = append(comments, &FoldingRangeInfo{ // Fold from the end of the first line comment to the end of the comment block. - MappedRange: mrng, - Kind: protocol.Comment, + Range: rng, + Kind: protocol.Comment, }) } return comments diff --git a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go index fa255e6b1c6..de4ec3a642c 100644 --- a/gopls/internal/golang/format.go +++ b/gopls/internal/golang/format.go @@ -21,6 +21,7 @@ import ( "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" @@ -120,7 +121,7 @@ func allImportsFixes(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego defer done() if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - allFixEdits, editsPerFix, err = computeImportEdits(ctx, pgf, snapshot.View().Folder().Env.GOROOT, opts) + allFixEdits, editsPerFix, err = computeImportEdits(ctx, pgf, snapshot, opts) return err }); err != nil { return nil, nil, fmt.Errorf("allImportsFixes: %v", err) @@ -130,12 +131,22 @@ func allImportsFixes(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego // computeImportEdits computes a set of edits that perform one or all of the // necessary import fixes. -func computeImportEdits(ctx context.Context, pgf *parsego.File, goroot string, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { +func computeImportEdits(ctx context.Context, pgf *parsego.File, snapshot *cache.Snapshot, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { + goroot := snapshot.View().Folder().Env.GOROOT filename := pgf.URI.Path() // Build up basic information about the original file. 
isource, err := imports.NewProcessEnvSource(options.Env, filename, pgf.File.Name.Name) - allFixes, err := imports.FixImports(ctx, filename, pgf.Src, goroot, options.Env.Logf, isource) + var source imports.Source + switch snapshot.Options().ImportsSource { + case settings.ImportsSourceGopls: + source = snapshot.NewGoplsSource(isource) + case settings.ImportsSourceOff: // for cider, which has no file system + source = nil + case settings.ImportsSourceGoimports: + source = isource + } + allFixes, err := imports.FixImports(ctx, filename, pgf.Src, goroot, options.Env.Logf, source) if err != nil { return nil, nil, err } diff --git a/gopls/internal/golang/freesymbols.go b/gopls/internal/golang/freesymbols.go index bbda8f7d948..2c9e25165f6 100644 --- a/gopls/internal/golang/freesymbols.go +++ b/gopls/internal/golang/freesymbols.go @@ -297,7 +297,7 @@ func freeRefs(pkg *types.Package, info *types.Info, file *ast.File, start, end t // Compute dotted path. objects := append(suffix, obj) - if obj.Pkg() != nil && obj.Pkg() != pkg && isPackageLevel(obj) { // dot import + if obj.Pkg() != nil && obj.Pkg() != pkg && typesinternal.IsPackageLevel(obj) { // dot import // Synthesize the implicit PkgName. pkgName := types.NewPkgName(token.NoPos, pkg, obj.Pkg().Name(), obj.Pkg()) parent = fileScope diff --git a/gopls/internal/golang/highlight.go b/gopls/internal/golang/highlight.go index 1174ce7f7d4..ee82b622a71 100644 --- a/gopls/internal/golang/highlight.go +++ b/gopls/internal/golang/highlight.go @@ -10,12 +10,17 @@ import ( "go/ast" "go/token" "go/types" + "strconv" + "strings" - "golang.org/x/tools/go/ast/astutil" + astutil "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + goplsastutil "golang.org/x/tools/gopls/internal/util/astutil" + internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/fmtstr" ) func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.DocumentHighlight, error) { @@ -49,7 +54,7 @@ func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, po } } } - result, err := highlightPath(path, pgf.File, pkg.TypesInfo()) + result, err := highlightPath(pkg.TypesInfo(), path, pos) if err != nil { return nil, err } @@ -69,8 +74,22 @@ func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, po // highlightPath returns ranges to highlight for the given enclosing path, // which should be the result of astutil.PathEnclosingInterval. -func highlightPath(path []ast.Node, file *ast.File, info *types.Info) (map[posRange]protocol.DocumentHighlightKind, error) { +func highlightPath(info *types.Info, path []ast.Node, pos token.Pos) (map[posRange]protocol.DocumentHighlightKind, error) { result := make(map[posRange]protocol.DocumentHighlightKind) + + // Inside a call to a printf-like function (as identified + // by a simple heuristic). + // Treat each corresponding ("%v", arg) pair as a highlight class. + for _, node := range path { + if call, ok := node.(*ast.CallExpr); ok { + lit, idx := formatStringAndIndex(info, call) + if idx != -1 { + highlightPrintf(call, idx, pos, lit, result) + } + } + } + + file := path[len(path)-1].(*ast.File) switch node := path[0].(type) { case *ast.BasicLit: // Import path string literal? 
@@ -131,6 +150,130 @@ func highlightPath(path []ast.Node, file *ast.File, info *types.Info) (map[posRa return result, nil } +// formatStringAndIndex returns the BasicLit and index of the BasicLit (the last +// non-variadic parameter) within the given printf-like call +// expression, returns -1 as index if unknown. +func formatStringAndIndex(info *types.Info, call *ast.CallExpr) (*ast.BasicLit, int) { + typ := info.Types[call.Fun].Type + if typ == nil { + return nil, -1 // missing type + } + sig, ok := typ.(*types.Signature) + if !ok { + return nil, -1 // ill-typed + } + if !sig.Variadic() { + // Skip checking non-variadic functions. + return nil, -1 + } + idx := sig.Params().Len() - 2 + if !(0 <= idx && idx < len(call.Args)) { + // Skip checking functions without a format string parameter, or + // missing the corresponding format argument. + return nil, -1 + } + // We only care about literal format strings, so fmt.Sprint("a"+"b%s", "bar") won't be highlighted. + if lit, ok := call.Args[idx].(*ast.BasicLit); ok && lit.Kind == token.STRING { + return lit, idx + } + return nil, -1 +} + +// highlightPrintf highlights operations in a format string and their corresponding +// variadic arguments in a (possible) printf-style function call. +// For example: +// +// fmt.Printf("Hello %s, you scored %d", name, score) +// +// If the cursor is on %s or name, it will highlight %s as a write operation, +// and name as a read operation. +func highlightPrintf(call *ast.CallExpr, idx int, cursorPos token.Pos, lit *ast.BasicLit, result map[posRange]protocol.DocumentHighlightKind) { + format, err := strconv.Unquote(lit.Value) + if err != nil { + return + } + if !strings.Contains(format, "%") { + return + } + operations, err := fmtstr.Parse(format, idx) + if err != nil { + return + } + + // fmt.Printf("%[1]d %[1].2d", 3) + // + // When cursor is in `%[1]d`, we record `3` being successfully highlighted. + // And because we will also record `%[1].2d`'s corresponding arguments index is `3` + // in `visited`, even though it will not highlight any item in the first pass, + // in the second pass we can correctly highlight it. So the three are the same class. + succeededArg := 0 + visited := make(map[posRange]int, 0) + + // highlightPair highlights the operation and its potential argument pair if the cursor is within either range. + highlightPair := func(rang fmtstr.Range, argIndex int) { + rangeStart, rangeEnd, err := internalastutil.RangeInStringLiteral(lit, rang.Start, rang.End) + if err != nil { + return + } + visited[posRange{rangeStart, rangeEnd}] = argIndex + + var arg ast.Expr + if argIndex < len(call.Args) { + arg = call.Args[argIndex] + } + + // cursorPos can't equal to end position, otherwise the two + // neighborhood such as (%[2]*d) are both highlighted if cursor in "d" (ending of [2]*). + if rangeStart <= cursorPos && cursorPos < rangeEnd || + arg != nil && goplsastutil.NodeContains(arg, cursorPos) { + highlightRange(result, rangeStart, rangeEnd, protocol.Write) + if arg != nil { + succeededArg = argIndex + highlightRange(result, arg.Pos(), arg.End(), protocol.Read) + } + } + } + + for _, op := range operations { + // If width or prec has any *, we can not highlight the full range from % to verb, + // because it will overlap with the sub-range of *, for example: + // + // fmt.Printf("%*[3]d", 4, 5, 6) + // ^ ^ we can only highlight this range when cursor in 6. '*' as a one-rune range will + // highlight for 4. + hasAsterisk := false + + // Try highlight Width if there is a *. 
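// Illustrative example: how formatStringAndIndex above chooses the format
// argument. For a variadic signature it takes the last non-variadic
// parameter (sig.Params().Len()-2) and only accepts a string literal there.
package example

import (
	"fmt"
	"os"
)

func demo(name string) {
	fmt.Printf("hello %s", name)           // Printf(format string, a ...any): index 0 is the literal
	fmt.Fprintf(os.Stderr, "bye %s", name) // Fprintf(w, format string, a ...any): index 1
	_ = fmt.Sprintf("a"+"b%s", name)       // not a literal at the format index, so not treated as a format string
}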
+ if op.Width.Dynamic != -1 { + hasAsterisk = true + highlightPair(op.Width.Range, op.Width.Dynamic) + } + + // Try highlight Precision if there is a *. + if op.Prec.Dynamic != -1 { + hasAsterisk = true + highlightPair(op.Prec.Range, op.Prec.Dynamic) + } + + // Try highlight Verb. + if op.Verb.Verb != '%' { + // If any * is found inside operation, narrow the highlight range. + if hasAsterisk { + highlightPair(op.Verb.Range, op.Verb.ArgIndex) + } else { + highlightPair(op.Range, op.Verb.ArgIndex) + } + } + } + + // Second pass, try to highlight those missed operations. + for rang, argIndex := range visited { + if succeededArg == argIndex { + highlightRange(result, rang.start, rang.end, protocol.Write) + } + } +} + type posRange struct { start, end token.Pos } diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index 80c47470215..7fc584f2c1a 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -280,12 +280,13 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro // // There's not much useful information to provide. if selectedType != nil { - fakeObj := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType) - signature := types.ObjectString(fakeObj, qual) + v := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType) + typesinternal.SetVarKind(v, typesinternal.LocalVar) + signature := types.ObjectString(v, qual) return *hoverRange, &hoverResult{ signature: signature, singleLine: signature, - symbolName: fakeObj.Name(), + symbolName: v.Name(), }, nil } @@ -587,13 +588,13 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro pkg := obj.Pkg() if recv != nil { linkName = fmt.Sprintf("(%s.%s).%s", pkg.Name(), recv.Name(), obj.Name()) - if obj.Exported() && recv.Exported() && isPackageLevel(recv) { + if obj.Exported() && recv.Exported() && typesinternal.IsPackageLevel(recv) { linkPath = pkg.Path() anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name()) } } else { linkName = fmt.Sprintf("%s.%s", pkg.Name(), obj.Name()) - if obj.Exported() && isPackageLevel(obj) { + if obj.Exported() && typesinternal.IsPackageLevel(obj) { linkPath = pkg.Path() anchor = obj.Name() } @@ -1333,7 +1334,7 @@ func StdSymbolOf(obj types.Object) *stdlib.Symbol { } // Handle Function, Type, Const & Var. - if isPackageLevel(obj) { + if obj != nil && typesinternal.IsPackageLevel(obj) { for _, s := range symbols { if s.Kind == stdlib.Method || s.Kind == stdlib.Field { continue @@ -1348,7 +1349,7 @@ func StdSymbolOf(obj types.Object) *stdlib.Symbol { // Handle Method. if fn, _ := obj.(*types.Func); fn != nil { isPtr, named := typesinternal.ReceiverNamed(fn.Signature().Recv()) - if named != nil && isPackageLevel(named.Obj()) { + if named != nil && typesinternal.IsPackageLevel(named.Obj()) { for _, s := range symbols { if s.Kind != stdlib.Method { continue diff --git a/gopls/internal/golang/pkgdoc.go b/gopls/internal/golang/pkgdoc.go index 8050937a88b..a5f9cc97fa4 100644 --- a/gopls/internal/golang/pkgdoc.go +++ b/gopls/internal/golang/pkgdoc.go @@ -140,7 +140,7 @@ func DocFragment(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (p } // package-level symbol? 
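// Illustrative example: the highlight classes highlightPrintf above
// produces for the call from its doc comment. Each directive and its
// argument form one class; the directive is reported as a Write highlight
// and the argument as a Read highlight.
package example

import "fmt"

func report(name string, score int) {
	// cursor on %s or on name:   %s -> Write, name  -> Read
	// cursor on %d or on score:  %d -> Write, score -> Read
	fmt.Printf("Hello %s, you scored %d", name, score)
}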
- if isPackageLevel(sym) { + if typesinternal.IsPackageLevel(sym) { return pkgpath, sym.Name(), makeTitle(objectKind(sym), sym.Pkg(), sym.Name()) } @@ -667,7 +667,7 @@ window.addEventListener('load', function() { cloneTparams(sig.TypeParams()), types.NewTuple(append( slices.Collect(tupleVariables(sig.Params()))[:3], - types.NewVar(0, nil, "", types.Typ[types.Invalid]))...), + types.NewParam(0, nil, "", types.Typ[types.Invalid]))...), sig.Results(), false) // any final ...T parameter is truncated } diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index 914cd2b66ed..26e9d0a5a52 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -555,7 +555,7 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle // objectpath, the classifies them as local vars, but as // they came from export data they lack syntax and the // correct scope tree (issue #61294). - if !obj.(*types.Var).IsField() && !isPackageLevel(obj) { + if !obj.(*types.Var).IsField() && !typesinternal.IsPackageLevel(obj) { goto skipObjectPath } } @@ -1345,7 +1345,7 @@ func (r *renamer) updateCommentDocLinks() (map[protocol.DocumentURI][]diff.Edit, recvName := "" // Doc links can reference only exported package-level objects // and methods of exported package-level named types. - if !isPackageLevel(obj) { + if !typesinternal.IsPackageLevel(obj) { obj, isFunc := obj.(*types.Func) if !isFunc { continue @@ -1363,7 +1363,7 @@ func (r *renamer) updateCommentDocLinks() (map[protocol.DocumentURI][]diff.Edit, continue } name := named.Origin().Obj() - if !name.Exported() || !isPackageLevel(name) { + if !name.Exported() || !typesinternal.IsPackageLevel(name) { continue } recvName = name.Name() diff --git a/gopls/internal/golang/rename_check.go b/gopls/internal/golang/rename_check.go index ed6424c918f..280795abe5e 100644 --- a/gopls/internal/golang/rename_check.go +++ b/gopls/internal/golang/rename_check.go @@ -100,7 +100,7 @@ func (r *renamer) check(from types.Object) { r.checkInFileBlock(from_) } else if from_, ok := from.(*types.Label); ok { r.checkLabel(from_) - } else if isPackageLevel(from) { + } else if typesinternal.IsPackageLevel(from) { r.checkInPackageBlock(from) } else if v, ok := from.(*types.Var); ok && v.IsField() { r.checkStructField(v) @@ -949,13 +949,6 @@ func isLocal(obj types.Object) bool { return depth >= 4 } -func isPackageLevel(obj types.Object) bool { - if obj == nil { - return false - } - return obj.Pkg().Scope().Lookup(obj.Name()) == obj -} - // -- Plundered from go/scanner: --------------------------------------- func isLetter(ch rune) bool { diff --git a/gopls/internal/golang/semtok.go b/gopls/internal/golang/semtok.go index 2043f9aaacc..cb3f2cfd478 100644 --- a/gopls/internal/golang/semtok.go +++ b/gopls/internal/golang/semtok.go @@ -17,6 +17,7 @@ import ( "log" "path/filepath" "regexp" + "strconv" "strings" "time" @@ -28,7 +29,9 @@ import ( "golang.org/x/tools/gopls/internal/protocol/semtok" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/fmtstr" ) // semDebug enables comprehensive logging of decisions @@ -82,10 +85,8 @@ func SemanticTokens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handl return &protocol.SemanticTokens{ Data: semtok.Encode( tv.tokens, - snapshot.Options().NoSemanticString, - snapshot.Options().NoSemanticNumber, - snapshot.Options().SemanticTypes, - 
snapshot.Options().SemanticMods), + snapshot.Options().EnabledSemanticTokenTypes(), + snapshot.Options().EnabledSemanticTokenModifiers()), ResultID: time.Now().String(), // for delta requests, but we've never seen any }, nil } @@ -242,7 +243,7 @@ func (tv *tokenVisitor) comment(c *ast.Comment, importByName map[string]*types.P } // token emits a token of the specified extent and semantics. -func (tv *tokenVisitor) token(start token.Pos, length int, typ semtok.TokenType, modifiers ...semtok.Modifier) { +func (tv *tokenVisitor) token(start token.Pos, length int, typ semtok.Type, modifiers ...semtok.Modifier) { if !start.IsValid() { return } @@ -325,16 +326,17 @@ func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) { case *ast.AssignStmt: tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator) case *ast.BasicLit: - if strings.Contains(n.Value, "\n") { - // has to be a string. - tv.multiline(n.Pos(), n.End(), semtok.TokString) - break - } - what := semtok.TokNumber if n.Kind == token.STRING { - what = semtok.TokString + if strings.Contains(n.Value, "\n") { + // has to be a string. + tv.multiline(n.Pos(), n.End(), semtok.TokString) + } else if !tv.formatString(n) { + // not a format string, color the whole as a TokString. + tv.token(n.Pos(), len(n.Value), semtok.TokString) + } + } else { + tv.token(n.Pos(), len(n.Value), semtok.TokNumber) } - tv.token(n.Pos(), len(n.Value), what) case *ast.BinaryExpr: tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator) case *ast.BlockStmt: @@ -463,7 +465,57 @@ func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) { return true } -func (tv *tokenVisitor) appendObjectModifiers(mods []semtok.Modifier, obj types.Object) (semtok.TokenType, []semtok.Modifier) { +// formatString tries to report directives and string literals +// inside a (possible) printf-like call, it returns false and does nothing +// if the string is not a format string. +func (tv *tokenVisitor) formatString(lit *ast.BasicLit) bool { + if len(tv.stack) <= 1 { + return false + } + call, ok := tv.stack[len(tv.stack)-2].(*ast.CallExpr) + if !ok { + return false + } + lastNonVariadic, idx := formatStringAndIndex(tv.info, call) + if idx == -1 || lit != lastNonVariadic { + return false + } + format, err := strconv.Unquote(lit.Value) + if err != nil { + return false + } + if !strings.Contains(format, "%") { + return false + } + operations, err := fmtstr.Parse(format, idx) + if err != nil { + return false + } + + // It's a format string, compute interleaved sub range of directives and literals. + // pos tracks literal substring position within the overall BasicLit. + pos := lit.ValuePos + for _, op := range operations { + // Skip "%%". + if op.Verb.Verb == '%' { + continue + } + rangeStart, rangeEnd, err := astutil.RangeInStringLiteral(lit, op.Range.Start, op.Range.End) + if err != nil { + return false + } + // Report literal substring. + tv.token(pos, int(rangeStart-pos), semtok.TokString) + // Report formatting directive. + tv.token(rangeStart, int(rangeEnd-rangeStart), semtok.TokString, semtok.ModFormat) + pos = rangeEnd + } + // Report remaining literal substring. 
+ tv.token(pos, int(lit.End()-pos), semtok.TokString) + return true +} + +func (tv *tokenVisitor) appendObjectModifiers(mods []semtok.Modifier, obj types.Object) (semtok.Type, []semtok.Modifier) { if obj.Pkg() == nil { mods = append(mods, semtok.ModDefaultLibrary) } @@ -559,7 +611,7 @@ func appendTypeModifiers(mods []semtok.Modifier, t types.Type) []semtok.Modifier func (tv *tokenVisitor) ident(id *ast.Ident) { var ( - tok semtok.TokenType + tok semtok.Type mods []semtok.Modifier obj types.Object ok bool @@ -623,7 +675,7 @@ func (tv *tokenVisitor) isParam(pos token.Pos) bool { // def), use the parse stack. // A lot of these only happen when the package doesn't compile, // but in that case it is all best-effort from the parse tree. -func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.TokenType, []semtok.Modifier) { +func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.Type, []semtok.Modifier) { def := []semtok.Modifier{semtok.ModDefinition} n := len(tv.stack) - 2 // parent of Ident; stack is [File ... Ident] if n < 0 { @@ -746,7 +798,7 @@ func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.TokenType, []semtok.Modi } // multiline emits a multiline token (`string` or /*comment*/). -func (tv *tokenVisitor) multiline(start, end token.Pos, tok semtok.TokenType) { +func (tv *tokenVisitor) multiline(start, end token.Pos, tok semtok.Type) { // TODO(adonovan): test with non-ASCII. f := tv.fset.File(start) diff --git a/gopls/internal/golang/signature_help.go b/gopls/internal/golang/signature_help.go index 2211a45de61..1dbd76d57d0 100644 --- a/gopls/internal/golang/signature_help.go +++ b/gopls/internal/golang/signature_help.go @@ -72,9 +72,10 @@ loop: fnval = callExpr.Fun break loop } - case *ast.FuncLit, *ast.FuncType: - // The user is within an anonymous function, - // which may be the parameter to the *ast.CallExpr. + case *ast.FuncLit, *ast.FuncType, *ast.CompositeLit: + // The user is within an anonymous function or + // a composite literal, which may be the argument + // to the *ast.CallExpr. // Don't show signature help in this case. return nil, 0, nil case *ast.BasicLit: diff --git a/gopls/internal/golang/symbols.go b/gopls/internal/golang/symbols.go index 35959c2de7a..14f2703441c 100644 --- a/gopls/internal/golang/symbols.go +++ b/gopls/internal/golang/symbols.go @@ -15,6 +15,8 @@ import ( "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/internal/event" ) @@ -74,6 +76,110 @@ func DocumentSymbols(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand return symbols, nil } +// PackageSymbols returns a list of symbols in the narrowest package for the given file (specified +// by its URI). +// Methods with receivers are stored as children under the symbol for their receiver type. +// The PackageSymbol data type contains the same fields as protocol.DocumentSymbol, with +// an additional int field "File" that stores the index of that symbol's file in the +// PackageSymbolsResult.Files. 
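// Illustrative example: the token segmentation the formatString logic
// above produces for a format string literal. Directives are emitted as
// string tokens with the "format" modifier, the text between them remains
// plain string tokens, and "%%" is skipped, so it stays literal text.
package example

import "fmt"

func logLoad(host string, pct int) {
	// segments: "load on " | %s (format) | " is " | %d (format) | "%% of max"
	fmt.Printf("load on %s is %d%% of max", host, pct)
}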
+func PackageSymbols(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (command.PackageSymbolsResult, error) { + ctx, done := event.Start(ctx, "source.PackageSymbols") + defer done() + + mp, err := NarrowestMetadataForFile(ctx, snapshot, uri) + if err != nil { + return command.PackageSymbolsResult{}, err + } + pkgfiles := mp.CompiledGoFiles + // Maps receiver name to the methods that use it + receiverToMethods := make(map[string][]command.PackageSymbol) + // Maps type symbol name to its index in symbols + typeSymbolToIdx := make(map[string]int) + var symbols []command.PackageSymbol + for fidx, f := range pkgfiles { + fh, err := snapshot.ReadFile(ctx, f) + if err != nil { + return command.PackageSymbolsResult{}, err + } + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return command.PackageSymbolsResult{}, err + } + for _, decl := range pgf.File.Decls { + switch decl := decl.(type) { + case *ast.FuncDecl: + if decl.Name.Name == "_" { + continue + } + if fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl); err == nil { + // If function is a method, prepend the type of the method. + // Don't add the method as its own symbol; store it so we can + // add it as a child of the receiver type later + if decl.Recv != nil && len(decl.Recv.List) > 0 { + _, rname, _ := astutil.UnpackRecv(decl.Recv.List[0].Type) + receiverToMethods[rname.String()] = append(receiverToMethods[rname.String()], toPackageSymbol(fidx, fs)) + } else { + symbols = append(symbols, toPackageSymbol(fidx, fs)) + } + } + case *ast.GenDecl: + for _, spec := range decl.Specs { + switch spec := spec.(type) { + case *ast.TypeSpec: + if spec.Name.Name == "_" { + continue + } + if ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec); err == nil { + typeSymbolToIdx[ts.Name] = len(symbols) + symbols = append(symbols, toPackageSymbol(fidx, ts)) + } + case *ast.ValueSpec: + for _, name := range spec.Names { + if name.Name == "_" { + continue + } + if vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST); err == nil { + symbols = append(symbols, toPackageSymbol(fidx, vs)) + } + } + } + } + } + } + } + // Add methods as the child of their receiver type symbol + for recv, methods := range receiverToMethods { + if i, ok := typeSymbolToIdx[recv]; ok { + symbols[i].Children = append(symbols[i].Children, methods...) 
+ } + } + return command.PackageSymbolsResult{ + PackageName: string(mp.Name), + Files: pkgfiles, + Symbols: symbols, + }, nil + +} + +func toPackageSymbol(fileIndex int, s protocol.DocumentSymbol) command.PackageSymbol { + var res command.PackageSymbol + res.Name = s.Name + res.Detail = s.Detail + res.Kind = s.Kind + res.Tags = s.Tags + res.Range = s.Range + res.SelectionRange = s.SelectionRange + + children := make([]command.PackageSymbol, len(s.Children)) + for i, c := range s.Children { + children[i] = toPackageSymbol(fileIndex, c) + } + res.Children = children + + res.File = fileIndex + return res +} + func funcSymbol(m *protocol.Mapper, tf *token.File, decl *ast.FuncDecl) (protocol.DocumentSymbol, error) { s := protocol.DocumentSymbol{ Name: decl.Name.Name, diff --git a/gopls/internal/golang/undeclared.go b/gopls/internal/golang/undeclared.go index 35a5c7a1e57..0615386e9bf 100644 --- a/gopls/internal/golang/undeclared.go +++ b/gopls/internal/golang/undeclared.go @@ -68,8 +68,8 @@ func undeclaredFixTitle(path []ast.Node, errMsg string) string { return fmt.Sprintf("Create %s %s", noun, name) } -// CreateUndeclared generates a suggested declaration for an undeclared variable or function. -func CreateUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { +// createUndeclared generates a suggested declaration for an undeclared variable or function. +func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { pos := start // don't use the end path, _ := astutil.PathEnclosingInterval(file, pos, pos) if len(path) < 2 { diff --git a/gopls/internal/protocol/command/command_gen.go b/gopls/internal/protocol/command/command_gen.go index 28a7f44e88f..c9b18a40cb8 100644 --- a/gopls/internal/protocol/command/command_gen.go +++ b/gopls/internal/protocol/command/command_gen.go @@ -47,6 +47,7 @@ const ( MaybePromptForTelemetry Command = "gopls.maybe_prompt_for_telemetry" MemStats Command = "gopls.mem_stats" Modules Command = "gopls.modules" + PackageSymbols Command = "gopls.package_symbols" Packages Command = "gopls.packages" RegenerateCgo Command = "gopls.regenerate_cgo" RemoveDependency Command = "gopls.remove_dependency" @@ -58,7 +59,6 @@ const ( StartDebugging Command = "gopls.start_debugging" StartProfile Command = "gopls.start_profile" StopProfile Command = "gopls.stop_profile" - Test Command = "gopls.test" Tidy Command = "gopls.tidy" UpdateGoSum Command = "gopls.update_go_sum" UpgradeDependency Command = "gopls.upgrade_dependency" @@ -92,6 +92,7 @@ var Commands = []Command{ MaybePromptForTelemetry, MemStats, Modules, + PackageSymbols, Packages, RegenerateCgo, RemoveDependency, @@ -103,7 +104,6 @@ var Commands = []Command{ StartDebugging, StartProfile, StopProfile, - Test, Tidy, UpdateGoSum, UpgradeDependency, @@ -113,7 +113,7 @@ var Commands = []Command{ WorkspaceStats, } -func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (interface{}, error) { +func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (any, error) { switch Command(params.Command) { case AddDependency: var a0 DependencyArgs @@ -248,6 +248,12 @@ func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Inte return nil, err } return s.Modules(ctx, a0) + case PackageSymbols: + var a0 PackageSymbolsArgs + if 
err := UnmarshalArgs(params.Arguments, &a0); err != nil { + return nil, err + } + return s.PackageSymbols(ctx, a0) case Packages: var a0 PackagesArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { @@ -310,14 +316,6 @@ func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Inte return nil, err } return s.StopProfile(ctx, a0) - case Test: - var a0 protocol.DocumentURI - var a1 []string - var a2 []string - if err := UnmarshalArgs(params.Arguments, &a0, &a1, &a2); err != nil { - return nil, err - } - return nil, s.Test(ctx, a0, a1, a2) case Tidy: var a0 URIArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { @@ -540,6 +538,14 @@ func NewModulesCommand(title string, a0 ModulesArgs) *protocol.Command { } } +func NewPackageSymbolsCommand(title string, a0 PackageSymbolsArgs) *protocol.Command { + return &protocol.Command{ + Title: title, + Command: PackageSymbols.String(), + Arguments: MustMarshalArgs(a0), + } +} + func NewPackagesCommand(title string, a0 PackagesArgs) *protocol.Command { return &protocol.Command{ Title: title, @@ -628,14 +634,6 @@ func NewStopProfileCommand(title string, a0 StopProfileArgs) *protocol.Command { } } -func NewTestCommand(title string, a0 protocol.DocumentURI, a1 []string, a2 []string) *protocol.Command { - return &protocol.Command{ - Title: title, - Command: Test.String(), - Arguments: MustMarshalArgs(a0, a1, a2), - } -} - func NewTidyCommand(title string, a0 URIArgs) *protocol.Command { return &protocol.Command{ Title: title, diff --git a/gopls/internal/protocol/command/gen/gen.go b/gopls/internal/protocol/command/gen/gen.go index 98155282499..d4935020b38 100644 --- a/gopls/internal/protocol/command/gen/gen.go +++ b/gopls/internal/protocol/command/gen/gen.go @@ -53,7 +53,7 @@ var Commands = []Command { {{- end}} } -func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (interface{}, error) { +func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Interface) (any, error) { switch Command(params.Command) { {{- range .Commands}} case {{.MethodName}}: diff --git a/gopls/internal/protocol/command/interface.go b/gopls/internal/protocol/command/interface.go index b0e80a4129e..32e03dd388a 100644 --- a/gopls/internal/protocol/command/interface.go +++ b/gopls/internal/protocol/command/interface.go @@ -47,19 +47,7 @@ type Interface interface { // Applies a fix to a region of source code. ApplyFix(context.Context, ApplyFixArgs) (*protocol.WorkspaceEdit, error) - // Test: Run test(s) (legacy) - // - // Runs `go test` for a specific set of test or benchmark functions. - // - // This command is asynchronous; wait for the 'end' progress notification. - // - // This command is an alias for RunTests; the only difference - // is the form of the parameters. - // - // TODO(adonovan): eliminate it. - Test(context.Context, protocol.DocumentURI, []string, []string) error - - // Test: Run test(s) + // RunTests: Run tests // // Runs `go test` for a specific set of test or benchmark functions. // @@ -306,6 +294,9 @@ type Interface interface { // language server client), there should never be a case where Modules is // called on a path that has not already been loaded. Modules(context.Context, ModulesArgs) (ModulesResult, error) + + // PackageSymbols: Return information about symbols in the given file's package. 
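A minimal sketch, not part of this CL, of how the generated pieces above connect: NewPackageSymbolsCommand marshals the arguments, and Dispatch unmarshals them and routes the call to an Interface implementation's PackageSymbols method. The helper below is illustrative and assumes imports of context plus the protocol and command packages from this patch.

func packageSymbols(ctx context.Context, srv command.Interface, uri protocol.DocumentURI) (command.PackageSymbolsResult, error) {
	cmd := command.NewPackageSymbolsCommand("Package symbols", command.PackageSymbolsArgs{URI: uri})
	res, err := command.Dispatch(ctx, &protocol.ExecuteCommandParams{
		Command:   cmd.Command,
		Arguments: cmd.Arguments,
	}, srv)
	if err != nil {
		return command.PackageSymbolsResult{}, err
	}
	// Dispatch returns whatever srv.PackageSymbols returned, as an any.
	return res.(command.PackageSymbolsResult), nil
}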
+ PackageSymbols(context.Context, PackageSymbolsArgs) (PackageSymbolsResult, error) } type RunTestsArgs struct { @@ -528,6 +519,9 @@ type RunVulncheckResult struct { type VulncheckResult struct { // Result holds the result of running vulncheck. Result *vulncheck.Result + // Token holds the progress token used to report progress back to the + // LSP client during vulncheck execution. + Token protocol.ProgressToken } // MemStatsResult holds selected fields from runtime.MemStats. @@ -801,3 +795,35 @@ type ModulesArgs struct { type ModulesResult struct { Modules []Module } + +type PackageSymbolsArgs struct { + URI protocol.DocumentURI +} + +type PackageSymbolsResult struct { + PackageName string + // Files is a list of files in the given URI's package. + Files []protocol.DocumentURI + Symbols []PackageSymbol +} + +// PackageSymbol has the same fields as DocumentSymbol, with an additional int field "File" +// which stores the index of the symbol's file in the PackageSymbolsResult.Files array. +type PackageSymbol struct { + Name string `json:"name"` + + Detail string `json:"detail,omitempty"` + + Kind protocol.SymbolKind `json:"kind"` + + Tags []protocol.SymbolTag `json:"tags,omitempty"` + + Range protocol.Range `json:"range"` + + SelectionRange protocol.Range `json:"selectionRange"` + + Children []PackageSymbol `json:"children,omitempty"` + + // Index of this symbol's file in PackageSymbolsResult.Files + File int `json:"file,omitempty"` +} diff --git a/gopls/internal/protocol/command/util.go b/gopls/internal/protocol/command/util.go index d07cd863f1c..3753b1e8eb1 100644 --- a/gopls/internal/protocol/command/util.go +++ b/gopls/internal/protocol/command/util.go @@ -21,7 +21,7 @@ func (c Command) String() string { return string(c) } // Example usage: // // jsonArgs, err := MarshalArgs(1, "hello", true, StructuredArg{42, 12.6}) -func MarshalArgs(args ...interface{}) ([]json.RawMessage, error) { +func MarshalArgs(args ...any) ([]json.RawMessage, error) { var out []json.RawMessage for _, arg := range args { argJSON, err := json.Marshal(arg) @@ -34,7 +34,7 @@ func MarshalArgs(args ...interface{}) ([]json.RawMessage, error) { } // MustMarshalArgs is like MarshalArgs, but panics on error. -func MustMarshalArgs(args ...interface{}) []json.RawMessage { +func MustMarshalArgs(args ...any) []json.RawMessage { msg, err := MarshalArgs(args...)
if err != nil { panic(err) @@ -54,7 +54,7 @@ func MustMarshalArgs(args ...interface{}) []json.RawMessage { // structured StructuredArg // ) // err := UnmarshalArgs(args, &num, &str, &bul, &structured) -func UnmarshalArgs(jsonArgs []json.RawMessage, args ...interface{}) error { +func UnmarshalArgs(jsonArgs []json.RawMessage, args ...any) error { if len(args) != len(jsonArgs) { return fmt.Errorf("DecodeArgs: expected %d input arguments, got %d JSON arguments", len(args), len(jsonArgs)) } diff --git a/gopls/internal/protocol/generate/generate.go b/gopls/internal/protocol/generate/generate.go index 7418918f51f..2bb14790940 100644 --- a/gopls/internal/protocol/generate/generate.go +++ b/gopls/internal/protocol/generate/generate.go @@ -64,7 +64,7 @@ func propStar(name string, t NameType, gotype string) (string, string) { star = "" // passed by reference, so no need for * } else { switch gotype { - case "bool", "uint32", "int32", "string", "interface{}": + case "bool", "uint32", "int32", "string", "interface{}", "any": star = "" // gopls compatibility if t.Optional } } diff --git a/gopls/internal/protocol/generate/main_test.go b/gopls/internal/protocol/generate/main_test.go index 73c22048a80..cc616b66195 100644 --- a/gopls/internal/protocol/generate/main_test.go +++ b/gopls/internal/protocol/generate/main_test.go @@ -40,7 +40,7 @@ func TestParseContents(t *testing.T) { if err != nil { t.Fatal(err) } - var our interface{} + var our any if err := json.Unmarshal(out, &our); err != nil { t.Fatal(err) } @@ -50,7 +50,7 @@ func TestParseContents(t *testing.T) { if err != nil { t.Fatalf("could not read metaModel.json: %v", err) } - var raw interface{} + var raw any if err := json.Unmarshal(buf, &raw); err != nil { t.Fatal(err) } diff --git a/gopls/internal/protocol/generate/output.go b/gopls/internal/protocol/generate/output.go index c981bf9c383..ba9d0cb909f 100644 --- a/gopls/internal/protocol/generate/output.go +++ b/gopls/internal/protocol/generate/output.go @@ -8,6 +8,7 @@ import ( "bytes" "fmt" "log" + "slices" "sort" "strings" ) @@ -219,8 +220,8 @@ func genStructs(model *Model) { fmt.Fprintf(out, "//\n") out.WriteString(lspLink(model, camelCase(s.Name))) fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(s.Line)) - // for gpls compatibilitye, embed most extensions, but expand the rest some day - props := append([]NameType{}, s.Properties...) + // for gopls compatibility, embed most extensions, but expand the rest some day + props := slices.Clone(s.Properties) if s.Name == "SymbolInformation" { // but expand this one for _, ex := range s.Extends { fmt.Fprintf(out, "\t// extends %s\n", ex.Name) @@ -242,7 +243,7 @@ func genStructs(model *Model) { // base types // (For URI and DocumentURI, see ../uri.go.) 
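Returning to the MarshalArgs/UnmarshalArgs helpers changed above: each argument travels as its own json.RawMessage, so a one-argument command produces a one-element slice. A hedged, self-contained sketch follows (the URI is a placeholder, and it ignores the fact that these packages are internal to the gopls module):

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/gopls/internal/protocol/command"
)

func main() {
	raw := command.MustMarshalArgs(command.PackageSymbolsArgs{URI: "file:///tmp/p/p.go"})
	fmt.Println(len(raw)) // 1: one raw message per argument

	var got command.PackageSymbolsArgs
	if err := command.UnmarshalArgs(raw, &got); err != nil {
		log.Fatal(err)
	}
	fmt.Println(got.URI) // file:///tmp/p/p.go
}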
- types["LSPAny"] = "type LSPAny = interface{}\n" + types["LSPAny"] = "type LSPAny = any\n" // A special case, the only previously existing Or type types["DocumentDiagnosticReport"] = "type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) \n" @@ -318,7 +319,7 @@ func genGenTypes() { sort.Strings(names) fmt.Fprintf(out, "// created for Or %v\n", names) fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(nt.line+1)) - fmt.Fprintf(out, "\tValue interface{} `json:\"value\"`\n") + fmt.Fprintf(out, "\tValue any `json:\"value\"`\n") case "and": fmt.Fprintf(out, "// created for And\n") fmt.Fprintf(out, "type %s struct {%s\n", nm, linex(nt.line+1)) diff --git a/gopls/internal/protocol/generate/tables.go b/gopls/internal/protocol/generate/tables.go index c80337f187b..c0841a2334b 100644 --- a/gopls/internal/protocol/generate/tables.go +++ b/gopls/internal/protocol/generate/tables.go @@ -57,32 +57,32 @@ var usedGoplsStar = make(map[prop]bool) // For gopls compatibility, use a different, typically more restrictive, type for some fields. var renameProp = map[prop]string{ - {"CancelParams", "id"}: "interface{}", + {"CancelParams", "id"}: "any", {"Command", "arguments"}: "[]json.RawMessage", {"CodeAction", "data"}: "json.RawMessage", // delay unmarshalling commands - {"Diagnostic", "code"}: "interface{}", + {"Diagnostic", "code"}: "any", {"Diagnostic", "data"}: "json.RawMessage", // delay unmarshalling quickfixes - {"DocumentDiagnosticReportPartialResult", "relatedDocuments"}: "map[DocumentURI]interface{}", + {"DocumentDiagnosticReportPartialResult", "relatedDocuments"}: "map[DocumentURI]any", {"ExecuteCommandParams", "arguments"}: "[]json.RawMessage", {"FoldingRange", "kind"}: "string", {"Hover", "contents"}: "MarkupContent", {"InlayHint", "label"}: "[]InlayHintLabelPart", - {"RelatedFullDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]interface{}", - {"RelatedUnchangedDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]interface{}", + {"RelatedFullDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]any", + {"RelatedUnchangedDocumentDiagnosticReport", "relatedDocuments"}: "map[DocumentURI]any", // PJW: this one is tricky. 
- {"ServerCapabilities", "codeActionProvider"}: "interface{}", + {"ServerCapabilities", "codeActionProvider"}: "any", - {"ServerCapabilities", "inlayHintProvider"}: "interface{}", + {"ServerCapabilities", "inlayHintProvider"}: "any", // slightly tricky - {"ServerCapabilities", "renameProvider"}: "interface{}", + {"ServerCapabilities", "renameProvider"}: "any", // slightly tricky - {"ServerCapabilities", "semanticTokensProvider"}: "interface{}", + {"ServerCapabilities", "semanticTokensProvider"}: "any", // slightly tricky - {"ServerCapabilities", "textDocumentSync"}: "interface{}", + {"ServerCapabilities", "textDocumentSync"}: "any", {"TextDocumentSyncOptions", "save"}: "SaveOptions", {"WorkspaceEdit", "documentChanges"}: "[]DocumentChange", } @@ -122,7 +122,7 @@ var goplsType = map[string]string{ "ConfigurationParams": "ParamConfiguration", "DocumentUri": "DocumentURI", "InitializeParams": "ParamInitialize", - "LSPAny": "interface{}", + "LSPAny": "any", "Lit_SemanticTokensOptions_range_Item1": "PRangeESemanticTokensOptions", @@ -130,18 +130,18 @@ var goplsType = map[string]string{ "Or_DidChangeConfigurationRegistrationOptions_section": "OrPSection_workspace_didChangeConfiguration", "Or_InlayHintLabelPart_tooltip": "OrPTooltipPLabel", "Or_InlayHint_tooltip": "OrPTooltip_textDocument_inlayHint", - "Or_LSPAny": "interface{}", + "Or_LSPAny": "any", "Or_ParameterInformation_documentation": "string", "Or_ParameterInformation_label": "string", "Or_PrepareRenameResult": "PrepareRenamePlaceholder", - "Or_ProgressToken": "interface{}", + "Or_ProgressToken": "any", "Or_Result_textDocument_completion": "CompletionList", "Or_Result_textDocument_declaration": "Or_textDocument_declaration", "Or_Result_textDocument_definition": "[]Location", - "Or_Result_textDocument_documentSymbol": "[]interface{}", + "Or_Result_textDocument_documentSymbol": "[]any", "Or_Result_textDocument_implementation": "[]Location", - "Or_Result_textDocument_semanticTokens_full_delta": "interface{}", + "Or_Result_textDocument_semanticTokens_full_delta": "any", "Or_Result_textDocument_typeDefinition": "[]Location", "Or_Result_workspace_symbol": "[]SymbolInformation", "Or_TextDocumentContentChangeEvent": "TextDocumentContentChangePartial", @@ -152,7 +152,7 @@ var goplsType = map[string]string{ "Tuple_ParameterInformation_label_Item1": "UIntCommaUInt", "WorkspaceFoldersServerCapabilities": "WorkspaceFolders5Gn", - "[]LSPAny": "[]interface{}", + "[]LSPAny": "[]any", "[]Or_Result_textDocument_codeAction_Item0_Elem": "[]CodeAction", "[]PreviousResultId": "[]PreviousResultID", diff --git a/gopls/internal/protocol/mapper.go b/gopls/internal/protocol/mapper.go index 85997c24dc4..a4aa2e2efe8 100644 --- a/gopls/internal/protocol/mapper.go +++ b/gopls/internal/protocol/mapper.go @@ -39,10 +39,9 @@ package protocol // All fields are optional. // // These types are useful as intermediate conversions of validated -// ranges (though MappedRange is superior as it is self contained -// and universally convertible). Since their fields are optional -// they are also useful for parsing user-provided positions (e.g. in -// the CLI) before we have access to file contents. +// ranges. Since their fields are optional they are also useful for +// parsing user-provided positions (e.g. in the CLI) before we have +// access to file contents. // // 4. protocol, the LSP RPC message format. 
// @@ -56,10 +55,6 @@ package protocol // protocol.Mapper holds the (URI, Content) of a file, enabling // efficient mapping between byte offsets, cmd ranges, and // protocol ranges. -// -// protocol.MappedRange holds a protocol.Mapper and valid (start, -// end int) byte offsets, enabling infallible, efficient conversion -// to any other format. import ( "bytes" @@ -67,7 +62,6 @@ import ( "go/ast" "go/token" "sort" - "strings" "sync" "unicode/utf8" @@ -195,7 +189,6 @@ func (m *Mapper) OffsetPosition(offset int) (Position, error) { } // No error may be returned after this point, // even if the offset does not fall at a rune boundary. - // (See panic in MappedRange.Range reachable.) line, col16 := m.lineCol16(offset) return Position{Line: uint32(line), Character: uint32(col16)}, nil @@ -251,15 +244,6 @@ func (m *Mapper) line(offset int) (int, int, bool) { return line, m.lineStart[line], cr } -// OffsetMappedRange returns a MappedRange for the given byte offsets. -// A MappedRange can be converted to any other form. -func (m *Mapper) OffsetMappedRange(start, end int) (MappedRange, error) { - if !(0 <= start && start <= end && end <= len(m.Content)) { - return MappedRange{}, fmt.Errorf("invalid offsets (%d, %d) (file %s has size %d)", start, end, m.URI, len(m.Content)) - } - return MappedRange{m, start, end}, nil -} - // -- conversions from protocol (UTF-16) domain -- // RangeOffsets converts a protocol (UTF-16) range to start/end byte offsets. @@ -362,78 +346,6 @@ func (m *Mapper) RangeLocation(rng Range) Location { return Location{URI: m.URI, Range: rng} } -// PosMappedRange returns a MappedRange for the given token.Pos range. -func (m *Mapper) PosMappedRange(tf *token.File, start, end token.Pos) (MappedRange, error) { - startOffset, endOffset, err := safetoken.Offsets(tf, start, end) - if err != nil { - return MappedRange{}, nil - } - return m.OffsetMappedRange(startOffset, endOffset) -} - -// NodeMappedRange returns a MappedRange for the given node range. -func (m *Mapper) NodeMappedRange(tf *token.File, node ast.Node) (MappedRange, error) { - return m.PosMappedRange(tf, node.Pos(), node.End()) -} - -// -- MappedRange -- - -// A MappedRange represents a valid byte-offset range of a file. -// Through its Mapper it can be converted into other forms such -// as protocol.Range or UTF-8. -// -// Construct one by calling Mapper.OffsetMappedRange with start/end offsets. -// From the go/token domain, call safetoken.Offsets first, -// or use a helper such as parsego.File.MappedPosRange. -// -// Two MappedRanges produced the same Mapper are equal if and only if they -// denote the same range. Two MappedRanges produced by different Mappers -// are unequal even when they represent the same range of the same file. -type MappedRange struct { - Mapper *Mapper - start, end int // valid byte offsets: 0 <= start <= end <= len(Mapper.Content) -} - -// Offsets returns the (start, end) byte offsets of this range. -func (mr MappedRange) Offsets() (start, end int) { return mr.start, mr.end } - -// -- convenience functions -- - -// URI returns the URI of the range's file. -func (mr MappedRange) URI() DocumentURI { - return mr.Mapper.URI -} - -// Range returns the range in protocol (UTF-16) form. -func (mr MappedRange) Range() Range { - rng, err := mr.Mapper.OffsetRange(mr.start, mr.end) - if err != nil { - panic(err) // can't happen - } - return rng -} - -// Location returns the range in protocol location (UTF-16) form. 
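Because this CL removes MappedRange, code that used it can compose the primitives that remain; below is a hedged sketch using only functions visible in this patch (safetoken.Offsets, Mapper.OffsetRange, Mapper.RangeLocation), with an illustrative helper name:

func nodeLocation(m *protocol.Mapper, tf *token.File, n ast.Node) (protocol.Location, error) {
	// Byte offsets first; this can fail if the node does not lie within tf.
	start, end, err := safetoken.Offsets(tf, n.Pos(), n.End())
	if err != nil {
		return protocol.Location{}, err
	}
	// Then the protocol (UTF-16) range, and finally a Location for m.URI.
	rng, err := m.OffsetRange(start, end)
	if err != nil {
		return protocol.Location{}, err
	}
	return m.RangeLocation(rng), nil
}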
-func (mr MappedRange) Location() Location { - return mr.Mapper.RangeLocation(mr.Range()) -} - -// String formats the range in UTF-8 notation. -func (mr MappedRange) String() string { - var s strings.Builder - startLine, startCol8 := mr.Mapper.OffsetLineCol8(mr.start) - fmt.Fprintf(&s, "%d:%d", startLine, startCol8) - if mr.end != mr.start { - endLine, endCol8 := mr.Mapper.OffsetLineCol8(mr.end) - if endLine == startLine { - fmt.Fprintf(&s, "-%d", endCol8) - } else { - fmt.Fprintf(&s, "-%d:%d", endLine, endCol8) - } - } - return s.String() -} - // LocationTextDocumentPositionParams converts its argument to its result. func LocationTextDocumentPositionParams(loc Location) TextDocumentPositionParams { return TextDocumentPositionParams{ diff --git a/gopls/internal/protocol/mapper_test.go b/gopls/internal/protocol/mapper_test.go index 8ba611a99f9..4326cc7be74 100644 --- a/gopls/internal/protocol/mapper_test.go +++ b/gopls/internal/protocol/mapper_test.go @@ -318,9 +318,9 @@ func getPrePost(content []byte, offset int) (string, string) { // -- these are the historical lsppos tests -- type testCase struct { - content string // input text - substrOrOffset interface{} // explicit integer offset, or a substring - wantLine, wantChar int // expected LSP position information + content string // input text + substrOrOffset any // explicit integer offset, or a substring + wantLine, wantChar int // expected LSP position information } // offset returns the test case byte offset diff --git a/gopls/internal/protocol/protocol.go b/gopls/internal/protocol/protocol.go index 7cc5589aa0b..f98d6371273 100644 --- a/gopls/internal/protocol/protocol.go +++ b/gopls/internal/protocol/protocol.go @@ -33,8 +33,8 @@ type ClientCloser interface { type connSender interface { io.Closer - Notify(ctx context.Context, method string, params interface{}) error - Call(ctx context.Context, method string, params, result interface{}) error + Notify(ctx context.Context, method string, params any) error + Call(ctx context.Context, method string, params, result any) error } type clientDispatcher struct { @@ -59,11 +59,11 @@ func (c clientConn) Close() error { return c.conn.Close() } -func (c clientConn) Notify(ctx context.Context, method string, params interface{}) error { +func (c clientConn) Notify(ctx context.Context, method string, params any) error { return c.conn.Notify(ctx, method, params) } -func (c clientConn) Call(ctx context.Context, method string, params interface{}, result interface{}) error { +func (c clientConn) Call(ctx context.Context, method string, params any, result any) error { id, err := c.conn.Call(ctx, method, params, result) if ctx.Err() != nil { cancelCall(ctx, c, id) @@ -83,11 +83,11 @@ func (c clientConnV2) Close() error { return c.conn.Close() } -func (c clientConnV2) Notify(ctx context.Context, method string, params interface{}) error { +func (c clientConnV2) Notify(ctx context.Context, method string, params any) error { return c.conn.Notify(ctx, method, params) } -func (c clientConnV2) Call(ctx context.Context, method string, params interface{}, result interface{}) error { +func (c clientConnV2) Call(ctx context.Context, method string, params any, result any) error { call := c.conn.Call(ctx, method, params) err := call.Await(ctx, result) if ctx.Err() != nil { @@ -126,16 +126,16 @@ func ClientHandler(client Client, handler jsonrpc2.Handler) jsonrpc2.Handler { } func ClientHandlerV2(client Client) jsonrpc2_v2.Handler { - return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) 
(interface{}, error) { + return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { if ctx.Err() != nil { return nil, RequestCancelledErrorV2 } req1 := req2to1(req) var ( - result interface{} + result any resErr error ) - replier := func(_ context.Context, res interface{}, err error) error { + replier := func(_ context.Context, res any, err error) error { if err != nil { resErr = err return nil @@ -166,16 +166,16 @@ func ServerHandler(server Server, handler jsonrpc2.Handler) jsonrpc2.Handler { } func ServerHandlerV2(server Server) jsonrpc2_v2.Handler { - return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { if ctx.Err() != nil { return nil, RequestCancelledErrorV2 } req1 := req2to1(req) var ( - result interface{} + result any resErr error ) - replier := func(_ context.Context, res interface{}, err error) error { + replier := func(_ context.Context, res any, err error) error { if err != nil { resErr = err return nil @@ -232,7 +232,7 @@ func CancelHandler(handler jsonrpc2.Handler) jsonrpc2.Handler { // be careful about racing between the two paths. // TODO(iancottrell): Add a test that watches the stream and verifies the response // for the cancelled request flows. - replyWithDetachedContext := func(ctx context.Context, resp interface{}, err error) error { + replyWithDetachedContext := func(ctx context.Context, resp any, err error) error { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#cancelRequest if ctx.Err() != nil && err == nil { err = RequestCancelledError @@ -257,7 +257,7 @@ func CancelHandler(handler jsonrpc2.Handler) jsonrpc2.Handler { } } -func Call(ctx context.Context, conn jsonrpc2.Conn, method string, params interface{}, result interface{}) error { +func Call(ctx context.Context, conn jsonrpc2.Conn, method string, params any, result any) error { id, err := conn.Call(ctx, method, params, result) if ctx.Err() != nil { cancelCall(ctx, clientConn{conn}, id) diff --git a/gopls/internal/protocol/semantic.go b/gopls/internal/protocol/semantic.go deleted file mode 100644 index 23356dd8ef2..00000000000 --- a/gopls/internal/protocol/semantic.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package protocol - -// The file defines helpers for semantics tokens. - -import "fmt" - -// SemanticTypes to use in case there is no client, as in the command line, or tests. -func SemanticTypes() []string { - return semanticTypes[:] -} - -// SemanticModifiers to use in case there is no client. -func SemanticModifiers() []string { - return semanticModifiers[:] -} - -// SemType returns a string equivalent of the type, for gopls semtok -func SemType(n int) string { - tokTypes := SemanticTypes() - tokMods := SemanticModifiers() - if n >= 0 && n < len(tokTypes) { - return tokTypes[n] - } - // not found for some reason - return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods)) -} - -// SemMods returns the []string equivalent of the mods, for gopls semtok. 
-func SemMods(n int) []string { - tokMods := SemanticModifiers() - mods := []string{} - for i := 0; i < len(tokMods); i++ { - if (n & (1 << uint(i))) != 0 { - mods = append(mods, tokMods[i]) - } - } - return mods -} - -// From https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens -var ( - semanticTypes = [...]string{ - "namespace", "type", "class", "enum", "interface", - "struct", "typeParameter", "parameter", "variable", "property", "enumMember", - "event", "function", "method", "macro", "keyword", "modifier", "comment", - "string", "number", "regexp", "operator", - } - semanticModifiers = [...]string{ - "declaration", "definition", "readonly", "static", - "deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary", - // Additional modifiers - "interface", "struct", "signature", "pointer", "array", "map", "slice", "chan", "string", "number", "bool", "invalid", - } -) diff --git a/gopls/internal/protocol/semtok/semtok.go b/gopls/internal/protocol/semtok/semtok.go index fc269c38759..6b05b8bb5e2 100644 --- a/gopls/internal/protocol/semtok/semtok.go +++ b/gopls/internal/protocol/semtok/semtok.go @@ -11,33 +11,35 @@ import "sort" type Token struct { Line, Start uint32 Len uint32 - Type TokenType + Type Type Modifiers []Modifier } -type TokenType string +type Type string const ( // These are the tokens defined by LSP 3.18, but a client is // free to send its own set; any tokens that the server emits // that are not in this set are simply not encoded in the bitfield. + TokComment Type = "comment" // for a comment + TokFunction Type = "function" // for a function + TokKeyword Type = "keyword" // for a keyword + TokLabel Type = "label" // for a control label (LSP 3.18) + TokMacro Type = "macro" // for text/template tokens + TokMethod Type = "method" // for a method + TokNamespace Type = "namespace" // for an imported package name + TokNumber Type = "number" // for a numeric literal + TokOperator Type = "operator" // for an operator + TokParameter Type = "parameter" // for a parameter variable + TokString Type = "string" // for a string literal + TokType Type = "type" // for a type name (plus other uses) + TokTypeParam Type = "typeParameter" // for a type parameter + TokVariable Type = "variable" // for a var or const + // The section below lists the standard token types that + // gopls does not use. // - // If you add or uncomment a token type, document it in + // If you move a type into the section above, document it in // gopls/doc/features/passive.md#semantic-tokens.
- TokComment TokenType = "comment" // for a comment - TokFunction TokenType = "function" // for a function - TokKeyword TokenType = "keyword" // for a keyword - TokLabel TokenType = "label" // for a control label (LSP 3.18) - TokMacro TokenType = "macro" // for text/template tokens - TokMethod TokenType = "method" // for a method - TokNamespace TokenType = "namespace" // for an imported package name - TokNumber TokenType = "number" // for a numeric literal - TokOperator TokenType = "operator" // for an operator - TokParameter TokenType = "parameter" // for a parameter variable - TokString TokenType = "string" // for a string literal - TokType TokenType = "type" // for a type name (plus other uses) - TokTypeParam TokenType = "typeParameter" // for a type parameter - TokVariable TokenType = "variable" // for a var or const // TokClass TokenType = "class" // TokDecorator TokenType = "decorator" // TokEnum TokenType = "enum" @@ -50,24 +52,47 @@ const ( // TokStruct TokenType = "struct" ) +// TokenTypes is a slice of types gopls will return as its server capabilities. +var TokenTypes = []Type{ + TokNamespace, + TokType, + TokTypeParam, + TokParameter, + TokVariable, + TokFunction, + TokMethod, + TokMacro, + TokKeyword, + TokComment, + TokString, + TokNumber, + TokOperator, + TokLabel, +} + type Modifier string const ( // LSP 3.18 standard modifiers // As with TokenTypes, clients get only the modifiers they request. // - // If you add or uncomment a modifier, document it in - // gopls/doc/features/passive.md#semantic-tokens. + // The section below lists the standard modifiers that + // gopls understands. ModDefaultLibrary Modifier = "defaultLibrary" // for predeclared symbols ModDefinition Modifier = "definition" // for the declaring identifier of a symbol ModReadonly Modifier = "readonly" // for constants (TokVariable) - // ModAbstract Modifier = "abstract" - // ModAsync Modifier = "async" - // ModDeclaration Modifier = "declaration" - // ModDeprecated Modifier = "deprecated" - // ModDocumentation Modifier = "documentation" - // ModModification Modifier = "modification" - // ModStatic Modifier = "static" + // The section below lists the remaining standard modifiers, which + // gopls does not use. + // + // If you move a modifier into the section above, document it in + // gopls/doc/features/passive.md#semantic-tokens. + // ModAbstract Modifier = "abstract" + // ModAsync Modifier = "async" + // ModDeclaration Modifier = "declaration" + // ModDeprecated Modifier = "deprecated" + // ModDocumentation Modifier = "documentation" + // ModModification Modifier = "modification" + // ModStatic Modifier = "static" // non-standard modifiers // @@ -77,6 +102,7 @@ const ( ModArray Modifier = "array" ModBool Modifier = "bool" ModChan Modifier = "chan" + ModFormat Modifier = "format" // for format string directives such as "%s" ModInterface Modifier = "interface" ModMap Modifier = "map" ModNumber Modifier = "number" @@ -87,13 +113,36 @@ const ( ModStruct Modifier = "struct" ) +// TokenModifiers is a slice of modifiers gopls will return as its server +// capabilities. +var TokenModifiers = []Modifier{ + // LSP 3.18 standard modifiers. + ModDefinition, + ModReadonly, + ModDefaultLibrary, + // Additional custom modifiers. + ModArray, + ModBool, + ModChan, + ModFormat, + ModInterface, + ModMap, + ModNumber, + ModPointer, + ModSignature, + ModSlice, + ModString, + ModStruct, +} + // Encode returns the LSP encoding of a sequence of tokens.
-// The noStrings, noNumbers options cause strings, numbers to be skipped. -// The lists of types and modifiers determines the bitfield encoding. +// encodeType and encodeModifier maps control which types and modifiers are +// excluded in the response. If a type or modifier maps to false, it will be +// omitted from the output. func Encode( tokens []Token, - noStrings, noNumbers bool, - types, modifiers []string) []uint32 { + encodeType map[Type]bool, + encodeModifier map[Modifier]bool) []uint32 { // binary operators, at least, will be out of order sort.Slice(tokens, func(i, j int) bool { @@ -103,17 +152,23 @@ func Encode( return tokens[i].Start < tokens[j].Start }) - typeMap := make(map[TokenType]int) - for i, t := range types { - typeMap[TokenType(t)] = i + typeMap := make(map[Type]int) + for i, t := range TokenTypes { + if enable, ok := encodeType[t]; ok && !enable { + continue + } + typeMap[Type(t)] = i } modMap := make(map[Modifier]int) - for i, m := range modifiers { + for i, m := range TokenModifiers { + if enable, ok := encodeModifier[m]; ok && !enable { + continue + } modMap[Modifier(m)] = 1 << i } - // each semantic token needs five values + // each semantic token needs five values but some tokens might be skipped. // (see Integer Encoding for Tokens in the LSP spec) x := make([]uint32, 5*len(tokens)) var j int @@ -122,13 +177,7 @@ func Encode( item := tokens[i] typ, ok := typeMap[item.Type] if !ok { - continue // client doesn't want typeStr - } - if item.Type == TokString && noStrings { - continue - } - if item.Type == TokNumber && noNumbers { - continue + continue // client doesn't want semantic token info. } if j == 0 { x[0] = tokens[0].Line diff --git a/gopls/internal/protocol/tsclient.go b/gopls/internal/protocol/tsclient.go index 8fd322d424a..51eef36b4bf 100644 --- a/gopls/internal/protocol/tsclient.go +++ b/gopls/internal/protocol/tsclient.go @@ -26,7 +26,7 @@ type Client interface { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#client_unregisterCapability UnregisterCapability(context.Context, *UnregistrationParams) error // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#telemetry_event - Event(context.Context, *interface{}) error + Event(context.Context, *any) error // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_publishDiagnostics PublishDiagnostics(context.Context, *PublishDiagnosticsParams) error // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#window_logMessage @@ -97,7 +97,7 @@ func clientDispatch(ctx context.Context, client Client, reply jsonrpc2.Replier, return true, reply(ctx, nil, err) case "telemetry/event": - var params interface{} + var params any if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } @@ -236,7 +236,7 @@ func (s *clientDispatcher) RegisterCapability(ctx context.Context, params *Regis func (s *clientDispatcher) UnregisterCapability(ctx context.Context, params *UnregistrationParams) error { return s.sender.Call(ctx, "client/unregisterCapability", params, nil) } -func (s *clientDispatcher) Event(ctx context.Context, params *interface{}) error { +func (s *clientDispatcher) Event(ctx context.Context, params *any) error { return s.sender.Notify(ctx, "telemetry/event", params) } func (s *clientDispatcher) PublishDiagnostics(ctx context.Context, params *PublishDiagnosticsParams) error { diff --git 
a/gopls/internal/protocol/tsprotocol.go b/gopls/internal/protocol/tsprotocol.go index 198aeae7d01..444e51e0717 100644 --- a/gopls/internal/protocol/tsprotocol.go +++ b/gopls/internal/protocol/tsprotocol.go @@ -135,7 +135,7 @@ type CallHierarchyItem struct { SelectionRange Range `json:"selectionRange"` // A data entry field that is preserved between a call hierarchy prepare and // incoming calls or outgoing calls requests. - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // Call hierarchy options used during static registration. @@ -196,7 +196,7 @@ type CallHierarchyRegistrationOptions struct { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#cancelParams type CancelParams struct { // The request id to cancel. - ID interface{} `json:"id"` + ID any `json:"id"` } // Additional information that describes document changes. @@ -249,7 +249,7 @@ type ClientCapabilities struct { // @since 3.16.0 General *GeneralClientCapabilities `json:"general,omitempty"` // Experimental client capabilities. - Experimental interface{} `json:"experimental,omitempty"` + Experimental any `json:"experimental,omitempty"` } // @since 3.18.0 @@ -758,7 +758,7 @@ type CodeLens struct { Command *Command `json:"command,omitempty"` // A data entry field that is preserved on a code lens item between // a {@link CodeLensRequest} and a {@link CodeLensResolveRequest} - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // The client capabilities of a {@link CodeLensRequest}. @@ -1047,7 +1047,7 @@ type CompletionItem struct { Command *Command `json:"command,omitempty"` // A data entry field that is preserved on a completion item between a // {@link CompletionRequest} and a {@link CompletionResolveRequest}. - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // In many cases the items of an actual completion result share the same @@ -1085,7 +1085,7 @@ type CompletionItemDefaults struct { // A default data value. // // @since 3.17.0 - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // The kind of a completion entry. @@ -1413,7 +1413,7 @@ type Diagnostic struct { // always provide a severity value. Severity DiagnosticSeverity `json:"severity,omitempty"` // The diagnostic's code, which usually appear in the user interface. - Code interface{} `json:"code,omitempty"` + Code any `json:"code,omitempty"` // An optional property to describe the error code. // Requires the code field (above) to be present/not null. 
// @@ -1563,7 +1563,7 @@ type DidChangeConfigurationClientCapabilities struct { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationParams type DidChangeConfigurationParams struct { // The actual changed settings - Settings interface{} `json:"settings"` + Settings any `json:"settings"` } // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationRegistrationOptions @@ -1789,7 +1789,7 @@ type DocumentDiagnosticReportKind string // // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReportPartialResult type DocumentDiagnosticReportPartialResult struct { - RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments"` + RelatedDocuments map[DocumentURI]any `json:"relatedDocuments"` } // A document filter describes a top level text document or @@ -1899,7 +1899,7 @@ type DocumentLink struct { Tooltip string `json:"tooltip,omitempty"` // A data entry field that is preserved on a document link between a // DocumentLinkRequest and a DocumentLinkResolveRequest. - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // The client capabilities of a {@link DocumentLinkRequest}. @@ -2702,7 +2702,7 @@ type InlayHint struct { PaddingRight bool `json:"paddingRight,omitempty"` // A data entry field that is preserved on an inlay hint between // a `textDocument/inlayHint` and a `inlayHint/resolve` request. - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // Inlay hint client capabilities. @@ -3053,13 +3053,13 @@ type InsertTextFormat uint32 // // @since 3.16.0 type InsertTextMode uint32 -type LSPAny = interface{} +type LSPAny = any // LSP arrays. // @since 3.17.0 // // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPArray -type LSPArray = []interface{} // (alias) +type LSPArray = []any // (alias) type LSPErrorCodes int32 // LSP object definition. 
@@ -3623,337 +3623,337 @@ type OptionalVersionedTextDocumentIdentifier struct { // created for Or [Location LocationUriOnly] type OrPLocation_workspace_symbol struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [[]string string] type OrPSection_workspace_didChangeConfiguration struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MarkupContent string] type OrPTooltipPLabel struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MarkupContent string] type OrPTooltip_textDocument_inlayHint struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [int32 string] type Or_CancelParams_id struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [ClientSemanticTokensRequestFullDelta bool] type Or_ClientSemanticTokensRequestOptions_full struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool] type Or_ClientSemanticTokensRequestOptions_range struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [EditRangeWithInsertReplace Range] type Or_CompletionItemDefaults_editRange struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MarkupContent string] type Or_CompletionItem_documentation struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [InsertReplaceEdit TextEdit] type Or_CompletionItem_textEdit struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [Location []Location] type Or_Definition struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [int32 string] type Or_Diagnostic_code struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport] type Or_DocumentDiagnosticReport struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookCellTextDocumentFilter TextDocumentFilter] type Or_DocumentFilter struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [Pattern RelativePattern] type Or_GlobPattern struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MarkedString MarkupContent []MarkedString] type Or_Hover_contents struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [[]InlayHintLabelPart string] type Or_InlayHint_label struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [StringValue string] type Or_InlineCompletionItem_insertText struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup] type Or_InlineValue struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MarkedStringWithLanguage string] type Or_MarkedString struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookDocumentFilter string] type Or_NotebookCellTextDocumentFilter_notebook struct { - 
Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme] type Or_NotebookDocumentFilter struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookDocumentFilter string] type Or_NotebookDocumentFilterWithCells_notebook struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookDocumentFilter string] type Or_NotebookDocumentFilterWithNotebook_notebook struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook] type Or_NotebookDocumentSyncOptions_notebookSelector_Elem struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [CodeAction Command] type Or_Result_textDocument_codeAction_Item0_Elem struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [InlineCompletionList []InlineCompletionItem] type Or_Result_textDocument_inlineCompletion struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [SemanticTokensFullDelta bool] type Or_SemanticTokensOptions_full struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [PRangeESemanticTokensOptions bool] type Or_SemanticTokensOptions_range struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [CallHierarchyOptions CallHierarchyRegistrationOptions bool] type Or_ServerCapabilities_callHierarchyProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [CodeActionOptions bool] type Or_ServerCapabilities_codeActionProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DocumentColorOptions DocumentColorRegistrationOptions bool] type Or_ServerCapabilities_colorProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DeclarationOptions DeclarationRegistrationOptions bool] type Or_ServerCapabilities_declarationProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DefinitionOptions bool] type Or_ServerCapabilities_definitionProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DiagnosticOptions DiagnosticRegistrationOptions] type Or_ServerCapabilities_diagnosticProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DocumentFormattingOptions bool] type Or_ServerCapabilities_documentFormattingProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DocumentHighlightOptions bool] type Or_ServerCapabilities_documentHighlightProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DocumentRangeFormattingOptions bool] type Or_ServerCapabilities_documentRangeFormattingProvider struct { - Value 
interface{} `json:"value"` + Value any `json:"value"` } // created for Or [DocumentSymbolOptions bool] type Or_ServerCapabilities_documentSymbolProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [FoldingRangeOptions FoldingRangeRegistrationOptions bool] type Or_ServerCapabilities_foldingRangeProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [HoverOptions bool] type Or_ServerCapabilities_hoverProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [ImplementationOptions ImplementationRegistrationOptions bool] type Or_ServerCapabilities_implementationProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [InlayHintOptions InlayHintRegistrationOptions bool] type Or_ServerCapabilities_inlayHintProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [InlineCompletionOptions bool] type Or_ServerCapabilities_inlineCompletionProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [InlineValueOptions InlineValueRegistrationOptions bool] type Or_ServerCapabilities_inlineValueProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool] type Or_ServerCapabilities_linkedEditingRangeProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MonikerOptions MonikerRegistrationOptions bool] type Or_ServerCapabilities_monikerProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions] type Or_ServerCapabilities_notebookDocumentSync struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [ReferenceOptions bool] type Or_ServerCapabilities_referencesProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [RenameOptions bool] type Or_ServerCapabilities_renameProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool] type Or_ServerCapabilities_selectionRangeProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions] type Or_ServerCapabilities_semanticTokensProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [TextDocumentSyncKind TextDocumentSyncOptions] type Or_ServerCapabilities_textDocumentSync struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool] type Or_ServerCapabilities_typeDefinitionProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool] type Or_ServerCapabilities_typeHierarchyProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [WorkspaceSymbolOptions bool] type Or_ServerCapabilities_workspaceSymbolProvider struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [MarkupContent string] type Or_SignatureInformation_documentation struct { - Value interface{} `json:"value"` + 
Value any `json:"value"` } // created for Or [AnnotatedTextEdit SnippetTextEdit TextEdit] type Or_TextDocumentEdit_edits_Elem struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme] type Or_TextDocumentFilter struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [SaveOptions bool] type Or_TextDocumentSyncOptions_save struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport] type Or_WorkspaceDocumentDiagnosticReport struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit] type Or_WorkspaceEdit_documentChanges_Elem struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [TextDocumentContentOptions TextDocumentContentRegistrationOptions] type Or_WorkspaceOptions_textDocumentContent struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Or [Declaration []DeclarationLink] type Or_textDocument_declaration struct { - Value interface{} `json:"value"` + Value any `json:"value"` } // created for Literal (Lit_SemanticTokensOptions_range_Item1) @@ -4122,11 +4122,11 @@ type ProgressParams struct { // The progress token provided by the client or server. Token ProgressToken `json:"token"` // The progress data. - Value interface{} `json:"value"` + Value any `json:"value"` } // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressToken -type ProgressToken = interface{} // (alias) +type ProgressToken = any // (alias) // The publish diagnostic client capabilities. // // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsClientCapabilities @@ -4227,7 +4227,7 @@ type Registration struct { // The method / capability to register for. Method string `json:"method"` // Options necessary for the registration. - RegisterOptions interface{} `json:"registerOptions,omitempty"` + RegisterOptions any `json:"registerOptions,omitempty"` } // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registrationParams @@ -4262,7 +4262,7 @@ type RelatedFullDocumentDiagnosticReport struct { // a.cpp and result in errors in a header file b.hpp. // // @since 3.17.0 - RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"` + RelatedDocuments map[DocumentURI]any `json:"relatedDocuments,omitempty"` FullDocumentDiagnosticReport } @@ -4279,7 +4279,7 @@ type RelatedUnchangedDocumentDiagnosticReport struct { // a.cpp and result in errors in a header file b.hpp. // // @since 3.17.0 - RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"` + RelatedDocuments map[DocumentURI]any `json:"relatedDocuments,omitempty"` UnchangedDocumentDiagnosticReport } @@ -4691,7 +4691,7 @@ type ServerCapabilities struct { // Defines how text documents are synced. Is either a detailed structure // defining each notification or for backwards compatibility the // TextDocumentSyncKind number. - TextDocumentSync interface{} `json:"textDocumentSync,omitempty"` + TextDocumentSync any `json:"textDocumentSync,omitempty"` // Defines how notebook documents are synced. 
// // @since 3.17.0 @@ -4719,7 +4719,7 @@ type ServerCapabilities struct { // The server provides code actions. CodeActionOptions may only be // specified if the client states that it supports // `codeActionLiteralSupport` in its initial `initialize` request. - CodeActionProvider interface{} `json:"codeActionProvider,omitempty"` + CodeActionProvider any `json:"codeActionProvider,omitempty"` // The server provides code lens. CodeLensProvider *CodeLensOptions `json:"codeLensProvider,omitempty"` // The server provides document link support. @@ -4737,7 +4737,7 @@ type ServerCapabilities struct { // The server provides rename support. RenameOptions may only be // specified if the client states that it supports // `prepareSupport` in its initial `initialize` request. - RenameProvider interface{} `json:"renameProvider,omitempty"` + RenameProvider any `json:"renameProvider,omitempty"` // The server provides folding provider support. FoldingRangeProvider *Or_ServerCapabilities_foldingRangeProvider `json:"foldingRangeProvider,omitempty"` // The server provides selection range support. @@ -4755,7 +4755,7 @@ type ServerCapabilities struct { // The server provides semantic tokens support. // // @since 3.16.0 - SemanticTokensProvider interface{} `json:"semanticTokensProvider,omitempty"` + SemanticTokensProvider any `json:"semanticTokensProvider,omitempty"` // The server provides moniker support. // // @since 3.16.0 @@ -4771,7 +4771,7 @@ type ServerCapabilities struct { // The server provides inlay hints. // // @since 3.17.0 - InlayHintProvider interface{} `json:"inlayHintProvider,omitempty"` + InlayHintProvider any `json:"inlayHintProvider,omitempty"` // The server has support for pull model diagnostics. // // @since 3.17.0 @@ -4784,7 +4784,7 @@ type ServerCapabilities struct { // Workspace specific server capabilities. Workspace *WorkspaceOptions `json:"workspace,omitempty"` // Experimental server capabilities. - Experimental interface{} `json:"experimental,omitempty"` + Experimental any `json:"experimental,omitempty"` } // @since 3.18.0 @@ -5590,7 +5590,7 @@ type TypeHierarchyItem struct { // supertypes or subtypes requests. It could also be used to identify the // type hierarchy in the server, helping improve the performance on // resolving supertypes and subtypes. - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` } // Type hierarchy options used during static registration. @@ -6132,7 +6132,7 @@ type WorkspaceSymbol struct { Location OrPLocation_workspace_symbol `json:"location"` // A data entry field that is preserved on a workspace symbol between a // workspace symbol request and a workspace symbol resolve request. - Data interface{} `json:"data,omitempty"` + Data any `json:"data,omitempty"` BaseSymbolInformation } @@ -6244,7 +6244,7 @@ type XInitializeParams struct { // The capabilities provided by the client (editor or tool) Capabilities ClientCapabilities `json:"capabilities"` // User provided initialization options. - InitializationOptions interface{} `json:"initializationOptions,omitempty"` + InitializationOptions any `json:"initializationOptions,omitempty"` // The initial trace setting. If omitted trace is disabled ('off'). Trace *TraceValue `json:"trace,omitempty"` WorkDoneProgressParams @@ -6287,7 +6287,7 @@ type _InitializeParams struct { // The capabilities provided by the client (editor or tool) Capabilities ClientCapabilities `json:"capabilities"` // User provided initialization options. 
- InitializationOptions interface{} `json:"initializationOptions,omitempty"` + InitializationOptions any `json:"initializationOptions,omitempty"` // The initial trace setting. If omitted trace is disabled ('off'). Trace *TraceValue `json:"trace,omitempty"` WorkDoneProgressParams diff --git a/gopls/internal/protocol/tsserver.go b/gopls/internal/protocol/tsserver.go index 51ddad9ec1f..d09f118c171 100644 --- a/gopls/internal/protocol/tsserver.go +++ b/gopls/internal/protocol/tsserver.go @@ -80,7 +80,7 @@ type Server interface { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_documentLink DocumentLink(context.Context, *DocumentLinkParams) ([]DocumentLink, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_documentSymbol - DocumentSymbol(context.Context, *DocumentSymbolParams) ([]interface{}, error) + DocumentSymbol(context.Context, *DocumentSymbolParams) ([]any, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_foldingRange FoldingRange(context.Context, *FoldingRangeParams) ([]FoldingRange, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_formatting @@ -120,7 +120,7 @@ type Server interface { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_semanticTokens_full SemanticTokensFull(context.Context, *SemanticTokensParams) (*SemanticTokens, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_semanticTokens_full_delta - SemanticTokensFullDelta(context.Context, *SemanticTokensDeltaParams) (interface{}, error) + SemanticTokensFullDelta(context.Context, *SemanticTokensDeltaParams) (any, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_semanticTokens_range SemanticTokensRange(context.Context, *SemanticTokensRangeParams) (*SemanticTokens, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_signatureHelp @@ -152,7 +152,7 @@ type Server interface { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_didRenameFiles DidRenameFiles(context.Context, *RenameFilesParams) error // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_executeCommand - ExecuteCommand(context.Context, *ExecuteCommandParams) (interface{}, error) + ExecuteCommand(context.Context, *ExecuteCommandParams) (any, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_symbol Symbol(context.Context, *WorkspaceSymbolParams) ([]SymbolInformation, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspace_textDocumentContent @@ -1083,8 +1083,8 @@ func (s *serverDispatcher) DocumentLink(ctx context.Context, params *DocumentLin } return result, nil } -func (s *serverDispatcher) DocumentSymbol(ctx context.Context, params *DocumentSymbolParams) ([]interface{}, error) { - var result []interface{} +func (s *serverDispatcher) DocumentSymbol(ctx context.Context, params *DocumentSymbolParams) ([]any, error) { + var result []any if err := s.sender.Call(ctx, "textDocument/documentSymbol", params, &result); err != nil 
{ return nil, err } @@ -1223,8 +1223,8 @@ func (s *serverDispatcher) SemanticTokensFull(ctx context.Context, params *Seman } return result, nil } -func (s *serverDispatcher) SemanticTokensFullDelta(ctx context.Context, params *SemanticTokensDeltaParams) (interface{}, error) { - var result interface{} +func (s *serverDispatcher) SemanticTokensFullDelta(ctx context.Context, params *SemanticTokensDeltaParams) (any, error) { + var result any if err := s.sender.Call(ctx, "textDocument/semanticTokens/full/delta", params, &result); err != nil { return nil, err } @@ -1303,8 +1303,8 @@ func (s *serverDispatcher) DidDeleteFiles(ctx context.Context, params *DeleteFil func (s *serverDispatcher) DidRenameFiles(ctx context.Context, params *RenameFilesParams) error { return s.sender.Notify(ctx, "workspace/didRenameFiles", params) } -func (s *serverDispatcher) ExecuteCommand(ctx context.Context, params *ExecuteCommandParams) (interface{}, error) { - var result interface{} +func (s *serverDispatcher) ExecuteCommand(ctx context.Context, params *ExecuteCommandParams) (any, error) { + var result any if err := s.sender.Call(ctx, "workspace/executeCommand", params, &result); err != nil { return nil, err } diff --git a/gopls/internal/protocol/uri.go b/gopls/internal/protocol/uri.go index e4252909835..4105bd041f8 100644 --- a/gopls/internal/protocol/uri.go +++ b/gopls/internal/protocol/uri.go @@ -121,9 +121,13 @@ func filename(uri DocumentURI) (string, error) { if b < ' ' || b == 0x7f || // control character b == '%' || b == '+' || // URI escape b == ':' || // Windows drive letter - b == '@' || b == '&' || b == '?' { // authority or query + b == '&' || b == '?' { // authority or query goto slow } + // We do not reject '@' as it cannot be part of the + // authority (e.g. user:pass@example.com) in a + // "file:///" URL, and '@' commonly appears in file + // paths such as GOMODCACHE/module@version/... } return rest, nil } diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index e785625655e..2b5c282a28f 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -309,10 +309,11 @@ func (c *commandHandler) AddTest(ctx context.Context, loc protocol.Location) (*p // commandConfig configures common command set-up and execution. type commandConfig struct { - requireSave bool // whether all files must be saved for the command to work - progress string // title to use for progress reporting. If empty, no progress will be reported. - forView string // view to resolve to a snapshot; incompatible with forURI - forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. + requireSave bool // whether all files must be saved for the command to work + progress string // title to use for progress reporting. If empty, no progress will be reported. + progressStyle settings.WorkDoneProgressStyle // style information for client-side progress display. + forView string // view to resolve to a snapshot; incompatible with forURI + forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. } // commandDeps is evaluated from a commandConfig. 
Note that not all fields may @@ -382,7 +383,11 @@ func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run command ctx, cancel := context.WithCancel(xcontext.Detach(ctx)) if cfg.progress != "" { - deps.work = c.s.progress.Start(ctx, cfg.progress, "Running...", c.params.WorkDoneToken, cancel) + header := "" + if _, ok := c.s.options.SupportedWorkDoneProgressFormats[cfg.progressStyle]; ok && cfg.progressStyle != "" { + header = fmt.Sprintf("style: %s\n\n", cfg.progressStyle) + } + deps.work = c.s.progress.Start(ctx, cfg.progress, header+"Running...", c.params.WorkDoneToken, cancel) } runcmd := func() error { defer release() @@ -683,15 +688,6 @@ func dropDependency(pm *cache.ParsedModule, modulePath string) ([]protocol.TextE return protocol.EditsFromDiffEdits(pm.Mapper, diff) } -// Test is an alias for RunTests (with splayed arguments). -func (c *commandHandler) Test(ctx context.Context, uri protocol.DocumentURI, tests, benchmarks []string) error { - return c.RunTests(ctx, command.RunTestsArgs{ - URI: uri, - Tests: tests, - Benchmarks: benchmarks, - }) -} - func (c *commandHandler) Doc(ctx context.Context, args command.DocArgs) (protocol.URI, error) { if args.Location.URI == "" { return "", errors.New("missing location URI") @@ -1026,19 +1022,22 @@ func (s *server) getUpgrades(ctx context.Context, snapshot *cache.Snapshot, uri func (c *commandHandler) GCDetails(ctx context.Context, uri protocol.DocumentURI) error { return c.run(ctx, commandConfig{ - progress: "Toggling display of compiler optimization details", - forURI: uri, + forURI: uri, }, func(ctx context.Context, deps commandDeps) error { return c.modifyState(ctx, FromToggleCompilerOptDetails, func() (*cache.Snapshot, func(), error) { + // Don't blindly use "dir := deps.fh.URI().Dir()"; validate. meta, err := golang.NarrowestMetadataForFile(ctx, deps.snapshot, deps.fh.URI()) if err != nil { return nil, nil, err } - want := !deps.snapshot.WantCompilerOptDetails(meta.ID) // toggle per-package flag + if len(meta.CompiledGoFiles) == 0 { + return nil, nil, fmt.Errorf("package %q does not compile file %q", meta.ID, deps.fh.URI()) + } + dir := meta.CompiledGoFiles[0].Dir() + + want := !deps.snapshot.WantCompilerOptDetails(dir) // toggle per-directory flag return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ - CompilerOptDetails: map[metadata.PackageID]bool{ - meta.ID: want, - }, + CompilerOptDetails: map[protocol.DocumentURI]bool{dir: want}, }) }) }) @@ -1220,9 +1219,10 @@ func (c *commandHandler) Vulncheck(ctx context.Context, args command.VulncheckAr var commandResult command.VulncheckResult err := c.run(ctx, commandConfig{ - progress: GoVulncheckCommandTitle, - requireSave: true, // govulncheck cannot honor overlays - forURI: args.URI, + progress: GoVulncheckCommandTitle, + progressStyle: settings.WorkDoneProgressStyleLog, + requireSave: true, // govulncheck cannot honor overlays + forURI: args.URI, }, func(ctx context.Context, deps commandDeps) error { jsonrpc2.Async(ctx) // run this in parallel with other requests: vulncheck can be slow. 
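The hunk above only prepends a "style: <name>" header (for example "style: log") to the progress message when the client has declared support for that style; other clients continue to see the plain message. As a hedged sketch, not part of this change, a client could split the header back off like this (splitProgressStyle is a hypothetical client-side helper):

package main

import (
	"fmt"
	"strings"
)

// splitProgressStyle separates an optional "style: <name>\n\n" header, as
// written by commandHandler.run in the hunk above, from the rest of a
// work-done progress message. Messages without the header pass through
// unchanged. (Hypothetical client-side helper; not part of gopls.)
func splitProgressStyle(msg string) (style, rest string) {
	if after, ok := strings.CutPrefix(msg, "style: "); ok {
		if s, r, ok := strings.Cut(after, "\n\n"); ok {
			return s, r
		}
	}
	return "", msg
}

func main() {
	style, rest := splitProgressStyle("style: log\n\nRunning...")
	fmt.Printf("%q %q\n", style, rest) // "log" "Running..."
}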
@@ -1235,6 +1235,7 @@ func (c *commandHandler) Vulncheck(ctx context.Context, args command.VulncheckAr return err } commandResult.Result = result + commandResult.Token = deps.work.Token() snapshot, release, err := c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ Vulns: map[protocol.DocumentURI]*vulncheck.Result{args.URI: result}, @@ -1281,6 +1282,7 @@ func (c *commandHandler) Vulncheck(ctx context.Context, args command.VulncheckAr // slated for deletion. // // TODO(golang/vscode-go#3572) +// TODO(hxjiang): deprecate gopls.run_govulncheck. func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.VulncheckArgs) (command.RunVulncheckResult, error) { if args.URI == "" { return command.RunVulncheckResult{}, errors.New("VulncheckArgs is missing URI field") @@ -1733,3 +1735,28 @@ func (c *commandHandler) ScanImports(ctx context.Context) error { } return nil } + +func (c *commandHandler) PackageSymbols(ctx context.Context, args command.PackageSymbolsArgs) (command.PackageSymbolsResult, error) { + var result command.PackageSymbolsResult + err := c.run(ctx, commandConfig{ + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + res, err := golang.PackageSymbols(ctx, deps.snapshot, args.URI) + if err != nil { + return err + } + result = res + return nil + }) + + // sort symbols for determinism + sort.SliceStable(result.Symbols, func(i, j int) bool { + iv, jv := result.Symbols[i], result.Symbols[j] + if iv.Name == jv.Name { + return iv.Range.Start.Line < jv.Range.Start.Line + } + return iv.Name < jv.Name + }) + + return result, err +} diff --git a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go index e95bf297501..b4e764b1233 100644 --- a/gopls/internal/server/diagnostics.go +++ b/gopls/internal/server/diagnostics.go @@ -11,6 +11,7 @@ import ( "os" "path/filepath" "runtime" + "slices" "sort" "strings" "sync" @@ -511,6 +512,24 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa // TODO(rfindley): here and above, we should avoid using the first result // if err is non-nil (though as of today it's OK). analysisDiags, err = golang.Analyze(ctx, snapshot, toAnalyze, s.progress) + + // Filter out Hint diagnostics for closed files. + // VS Code already omits Hint diagnostics in the Problems tab, but other + // clients do not. This filter makes the visibility of Hints more similar + // across clients. + for uri, diags := range analysisDiags { + if !snapshot.IsOpen(uri) { + newDiags := slices.DeleteFunc(diags, func(diag *cache.Diagnostic) bool { + return diag.Severity == protocol.SeverityHint + }) + if len(newDiags) == 0 { + delete(analysisDiags, uri) + } else { + analysisDiags[uri] = newDiags + } + } + } + if err != nil { event.Error(ctx, "warning: analyzing package", err, append(snapshot.Labels(), label.Package.Of(keys.Join(moremaps.KeySlice(toDiagnose))))...) return @@ -542,25 +561,26 @@ func (s *server) compilerOptDetailsDiagnostics(ctx context.Context, snapshot *ca // TODO(rfindley): This should memoize its results if the package has not changed. // Consider that these points, in combination with the note below about // races, suggest that compiler optimization details should be tracked on the Snapshot. 
- var detailPkgs map[metadata.PackageID]*metadata.Package - for _, mp := range toDiagnose { - if snapshot.WantCompilerOptDetails(mp.ID) { - if detailPkgs == nil { - detailPkgs = make(map[metadata.PackageID]*metadata.Package) - } - detailPkgs[mp.ID] = mp - } - } - diagnostics := make(diagMap) - for _, mp := range detailPkgs { - perFileDiags, err := golang.CompilerOptDetails(ctx, snapshot, mp) - if err != nil { - event.Error(ctx, "warning: compiler optimization details", err, append(snapshot.Labels(), label.Package.Of(string(mp.ID)))...) + seenDirs := make(map[protocol.DocumentURI]bool) + for _, mp := range toDiagnose { + if len(mp.CompiledGoFiles) == 0 { continue } - for uri, diags := range perFileDiags { - diagnostics[uri] = append(diagnostics[uri], diags...) + dir := mp.CompiledGoFiles[0].Dir() + if snapshot.WantCompilerOptDetails(dir) { + if !seenDirs[dir] { + seenDirs[dir] = true + + perFileDiags, err := golang.CompilerOptDetails(ctx, snapshot, dir) + if err != nil { + event.Error(ctx, "warning: compiler optimization details", err, append(snapshot.Labels(), label.URI.Of(dir))...) + continue + } + for uri, diags := range perFileDiags { + diagnostics[uri] = append(diagnostics[uri], diags...) + } + } } } return diagnostics, nil diff --git a/gopls/internal/server/folding_range.go b/gopls/internal/server/folding_range.go index 0ad00e54c8d..95b2ffc0744 100644 --- a/gopls/internal/server/folding_range.go +++ b/gopls/internal/server/folding_range.go @@ -36,7 +36,7 @@ func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRange func toProtocolFoldingRanges(ranges []*golang.FoldingRangeInfo) ([]protocol.FoldingRange, error) { result := make([]protocol.FoldingRange, 0, len(ranges)) for _, info := range ranges { - rng := info.MappedRange.Range() + rng := info.Range result = append(result, protocol.FoldingRange{ StartLine: rng.Start.Line, StartCharacter: rng.Start.Character, diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index 3a3a5efcd70..35614945f9d 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -26,10 +26,12 @@ import ( debuglog "golang.org/x/tools/gopls/internal/debug/log" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/semtok" "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/goversion" "golang.org/x/tools/gopls/internal/util/moremaps" + "golang.org/x/tools/gopls/internal/util/moreslices" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/jsonrpc2" ) @@ -163,8 +165,8 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ Range: &protocol.Or_SemanticTokensOptions_range{Value: true}, Full: &protocol.Or_SemanticTokensOptions_full{Value: true}, Legend: protocol.SemanticTokensLegend{ - TokenTypes: protocol.NonNilSlice(options.SemanticTypes), - TokenModifiers: protocol.NonNilSlice(options.SemanticMods), + TokenTypes: moreslices.ConvertStrings[string](semtok.TokenTypes), + TokenModifiers: moreslices.ConvertStrings[string](semtok.TokenModifiers), }, }, SignatureHelpProvider: &protocol.SignatureHelpOptions{ diff --git a/gopls/internal/server/rename.go b/gopls/internal/server/rename.go index cdfb9c7a8fe..b6fac8ba219 100644 --- a/gopls/internal/server/rename.go +++ b/gopls/internal/server/rename.go @@ -31,7 +31,7 @@ func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*pr } // Because 
we don't handle directory renaming within golang.Rename, golang.Rename returns - // boolean value isPkgRenaming to determine whether an DocumentChanges of type RenameFile should + // boolean value isPkgRenaming to determine whether any DocumentChanges of type RenameFile should + // be added to the return protocol.WorkspaceEdit value. edits, isPkgRenaming, err := golang.Rename(ctx, snapshot, fh, params.Position, params.NewName) if err != nil { return err } diff --git a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go index 7e13c801a85..5ba8bdd06b0 100644 --- a/gopls/internal/settings/analysis.go +++ b/gopls/internal/settings/analysis.go @@ -49,6 +49,8 @@ import ( "golang.org/x/tools/gopls/internal/analysis/deprecated" "golang.org/x/tools/gopls/internal/analysis/embeddirective" "golang.org/x/tools/gopls/internal/analysis/fillreturns" + "golang.org/x/tools/gopls/internal/analysis/gofix" + "golang.org/x/tools/gopls/internal/analysis/hostport" "golang.org/x/tools/gopls/internal/analysis/infertypeargs" "golang.org/x/tools/gopls/internal/analysis/modernize" "golang.org/x/tools/gopls/internal/analysis/nonewvars" @@ -68,7 +70,7 @@ import ( // Analyzers are immutable, since they are shared across multiple LSP sessions. type Analyzer struct { analyzer *analysis.Analyzer - enabled bool + nonDefault bool actionKinds []protocol.CodeActionKind severity protocol.DiagnosticSeverity tags []protocol.DiagnosticTag @@ -79,7 +81,7 @@ func (a *Analyzer) Analyzer() *analysis.Analyzer { return a.analyzer } // EnabledByDefault reports whether the analyzer is enabled by default for all sessions. // This value can be configured per-analysis in user settings. -func (a *Analyzer) EnabledByDefault() bool { return a.enabled } +func (a *Analyzer) EnabledByDefault() bool { return !a.nonDefault } // ActionKinds is the set of kinds of code action this analyzer produces. // @@ -87,12 +89,35 @@ func (a *Analyzer) EnabledByDefault() bool { return a.enabled } // TODO(rfindley): revisit. func (a *Analyzer) ActionKinds() []protocol.CodeActionKind { return a.actionKinds } -// Severity is the severity set for diagnostics reported by this -// analyzer. If left unset it defaults to Warning. +// Severity is the severity set for diagnostics reported by this analyzer. +// The default severity is SeverityWarning. // -// Note: diagnostics with severity protocol.SeverityHint do not show up in -// the VS Code "problems" tab. -func (a *Analyzer) Severity() protocol.DiagnosticSeverity { return a.severity } +// While the LSP spec does not specify how severity should be used, here are +// some guiding heuristics: +// - Error: for parse and type errors, which would stop the build. +// - Warning: for analyzer diagnostics reporting likely bugs. +// - Info: for analyzer diagnostics that do not indicate bugs, but may +// suggest inaccurate or superfluous code. +// - Hint: for analyzer diagnostics that do not indicate mistakes, but offer +// simplifications or modernizations. By their nature, hints should +// generally carry quick fixes. +// +// The difference between Info and Hint is particularly subtle. Importantly, +// Hint diagnostics do not appear in the Problems tab in VS Code, so they are +// less intrusive than Info diagnostics. The rule of thumb is this: use Info if +// the diagnostic is not a bug, but the author probably didn't mean to write +// the code that way. Use Hint if the diagnostic is not a bug and the author +// intended to write the code that way, but there is a simpler or more modern +// way to express the same logic. 
An 'unused' diagnostic is Info level, since +// the author probably didn't mean to check in unreachable code. A 'modernize' +// or 'deprecated' diagnostic is Hint level, since the author intended to write +// the code that way, but now there is a better way. +func (a *Analyzer) Severity() protocol.DiagnosticSeverity { + if a.severity == 0 { + return protocol.SeverityWarning + } + return a.severity +} // Tags is extra tags (unnecessary, deprecated, etc) for diagnostics // reported by this analyzer. @@ -108,85 +133,99 @@ func (a *Analyzer) String() string { return a.analyzer.String() } var DefaultAnalyzers = make(map[string]*Analyzer) // initialized below func init() { - // The traditional vet suite: + // See [Analyzer.Severity] for guidance on setting analyzer severity below. analyzers := []*Analyzer{ // The traditional vet suite: - {analyzer: appends.Analyzer, enabled: true}, - {analyzer: asmdecl.Analyzer, enabled: true}, - {analyzer: assign.Analyzer, enabled: true}, - {analyzer: atomic.Analyzer, enabled: true}, - {analyzer: bools.Analyzer, enabled: true}, - {analyzer: buildtag.Analyzer, enabled: true}, - {analyzer: cgocall.Analyzer, enabled: true}, - {analyzer: composite.Analyzer, enabled: true}, - {analyzer: copylock.Analyzer, enabled: true}, - {analyzer: defers.Analyzer, enabled: true}, - {analyzer: deprecated.Analyzer, enabled: true, severity: protocol.SeverityHint, tags: []protocol.DiagnosticTag{protocol.Deprecated}}, - {analyzer: directive.Analyzer, enabled: true}, - {analyzer: errorsas.Analyzer, enabled: true}, - {analyzer: framepointer.Analyzer, enabled: true}, - {analyzer: httpresponse.Analyzer, enabled: true}, - {analyzer: ifaceassert.Analyzer, enabled: true}, - {analyzer: loopclosure.Analyzer, enabled: true}, - {analyzer: lostcancel.Analyzer, enabled: true}, - {analyzer: nilfunc.Analyzer, enabled: true}, - {analyzer: printf.Analyzer, enabled: true}, - {analyzer: shift.Analyzer, enabled: true}, - {analyzer: sigchanyzer.Analyzer, enabled: true}, - {analyzer: slog.Analyzer, enabled: true}, - {analyzer: stdmethods.Analyzer, enabled: true}, - {analyzer: stdversion.Analyzer, enabled: true}, - {analyzer: stringintconv.Analyzer, enabled: true}, - {analyzer: structtag.Analyzer, enabled: true}, - {analyzer: testinggoroutine.Analyzer, enabled: true}, - {analyzer: tests.Analyzer, enabled: true}, - {analyzer: timeformat.Analyzer, enabled: true}, - {analyzer: unmarshal.Analyzer, enabled: true}, - {analyzer: unreachable.Analyzer, enabled: true}, - {analyzer: unsafeptr.Analyzer, enabled: true}, - {analyzer: unusedresult.Analyzer, enabled: true}, + {analyzer: appends.Analyzer}, + {analyzer: asmdecl.Analyzer}, + {analyzer: assign.Analyzer}, + {analyzer: atomic.Analyzer}, + {analyzer: bools.Analyzer}, + {analyzer: buildtag.Analyzer}, + {analyzer: cgocall.Analyzer}, + {analyzer: composite.Analyzer}, + {analyzer: copylock.Analyzer}, + {analyzer: defers.Analyzer}, + {analyzer: deprecated.Analyzer, severity: protocol.SeverityHint, tags: []protocol.DiagnosticTag{protocol.Deprecated}}, + {analyzer: directive.Analyzer}, + {analyzer: errorsas.Analyzer}, + {analyzer: framepointer.Analyzer}, + {analyzer: httpresponse.Analyzer}, + {analyzer: ifaceassert.Analyzer}, + {analyzer: loopclosure.Analyzer}, + {analyzer: lostcancel.Analyzer}, + {analyzer: nilfunc.Analyzer}, + {analyzer: printf.Analyzer}, + {analyzer: shift.Analyzer}, + {analyzer: sigchanyzer.Analyzer}, + {analyzer: slog.Analyzer}, + {analyzer: stdmethods.Analyzer}, + {analyzer: stdversion.Analyzer}, + {analyzer: stringintconv.Analyzer}, + {analyzer: 
structtag.Analyzer}, + {analyzer: testinggoroutine.Analyzer}, + {analyzer: tests.Analyzer}, + {analyzer: timeformat.Analyzer}, + {analyzer: unmarshal.Analyzer}, + {analyzer: unreachable.Analyzer}, + {analyzer: unsafeptr.Analyzer}, + {analyzer: unusedresult.Analyzer}, // not suitable for vet: // - some (nilness, yield) use go/ssa; see #59714. // - others don't meet the "frequency" criterion; // see GOROOT/src/cmd/vet/README. - // - some (modernize) report diagnostics on perfectly valid code (hence severity=info) - {analyzer: atomicalign.Analyzer, enabled: true}, - {analyzer: deepequalerrors.Analyzer, enabled: true}, - {analyzer: nilness.Analyzer, enabled: true}, // uses go/ssa - {analyzer: yield.Analyzer, enabled: true}, // uses go/ssa - {analyzer: sortslice.Analyzer, enabled: true}, - {analyzer: embeddirective.Analyzer, enabled: true}, - {analyzer: waitgroup.Analyzer, enabled: true}, // to appear in cmd/vet@go1.25 - {analyzer: modernize.Analyzer, enabled: true, severity: protocol.SeverityInformation}, + {analyzer: atomicalign.Analyzer}, + {analyzer: deepequalerrors.Analyzer}, + {analyzer: nilness.Analyzer}, // uses go/ssa + {analyzer: yield.Analyzer}, // uses go/ssa + {analyzer: sortslice.Analyzer}, + {analyzer: embeddirective.Analyzer}, + {analyzer: waitgroup.Analyzer}, // to appear in cmd/vet@go1.25 + {analyzer: hostport.Analyzer}, // to appear in cmd/vet@go1.25 // disabled due to high false positives - {analyzer: shadow.Analyzer, enabled: false}, // very noisy + {analyzer: shadow.Analyzer, nonDefault: true}, // very noisy // fieldalignment is not even off-by-default; see #67762. - // "simplifiers": analyzers that offer mere style fixes - // gofmt -s suite: - {analyzer: simplifycompositelit.Analyzer, enabled: true, actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}}, - {analyzer: simplifyrange.Analyzer, enabled: true, actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}}, - {analyzer: simplifyslice.Analyzer, enabled: true, actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}}, - // other simplifiers: - {analyzer: infertypeargs.Analyzer, enabled: true, severity: protocol.SeverityHint}, - {analyzer: unusedparams.Analyzer, enabled: true}, - {analyzer: unusedfunc.Analyzer, enabled: true}, - {analyzer: unusedwrite.Analyzer, enabled: true}, // uses go/ssa + // simplifiers and modernizers + // + // These analyzers offer mere style fixes on correct code, + // thus they will never appear in cmd/vet and + // their severity level is "information". 
+ // + // gofmt -s suite + { + analyzer: simplifycompositelit.Analyzer, + actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + severity: protocol.SeverityInformation, + }, + { + analyzer: simplifyrange.Analyzer, + actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + severity: protocol.SeverityInformation, + }, + { + analyzer: simplifyslice.Analyzer, + actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + severity: protocol.SeverityInformation, + }, + // other simplifiers + {analyzer: gofix.Analyzer, severity: protocol.SeverityHint}, + {analyzer: infertypeargs.Analyzer, severity: protocol.SeverityInformation}, + {analyzer: unusedparams.Analyzer, severity: protocol.SeverityInformation}, + {analyzer: unusedfunc.Analyzer, severity: protocol.SeverityInformation}, + {analyzer: unusedwrite.Analyzer, severity: protocol.SeverityInformation}, // uses go/ssa + {analyzer: modernize.Analyzer, severity: protocol.SeverityHint}, // type-error analyzers // These analyzers enrich go/types errors with suggested fixes. - {analyzer: fillreturns.Analyzer, enabled: true}, - {analyzer: nonewvars.Analyzer, enabled: true}, - {analyzer: noresultvalues.Analyzer, enabled: true}, - // TODO(rfindley): why isn't the 'unusedvariable' analyzer enabled, if it - // is only enhancing type errors with suggested fixes? - // - // In particular, enabling this analyzer could cause unused variables to be - // greyed out, (due to the 'deletions only' fix). That seems like a nice UI - // feature. - {analyzer: unusedvariable.Analyzer, enabled: false}, + // Since they exist only to attach their fixes to type errors, their + // severity is irrelevant. + {analyzer: fillreturns.Analyzer}, + {analyzer: nonewvars.Analyzer}, + {analyzer: noresultvalues.Analyzer}, + {analyzer: unusedvariable.Analyzer}, } for _, analyzer := range analyzers { DefaultAnalyzers[analyzer.analyzer.Name] = analyzer diff --git a/gopls/internal/settings/default.go b/gopls/internal/settings/default.go index f9b947b31a8..ebb3f1ccfae 100644 --- a/gopls/internal/settings/default.go +++ b/gopls/internal/settings/default.go @@ -39,6 +39,7 @@ func DefaultOptions(overrides ...func(*Options)) *Options { DynamicWatchedFilesSupported: true, LineFoldingOnly: false, HierarchicalDocumentSymbolSupport: true, + ImportsSource: ImportsSourceGoimports, }, ServerOptions: ServerOptions{ SupportedCodeActions: map[file.Kind]map[protocol.CodeActionKind]bool{ @@ -86,6 +87,7 @@ func DefaultOptions(overrides ...func(*Options)) *Options { DirectoryFilters: []string{"-**/node_modules"}, TemplateExtensions: []string{}, StandaloneTags: []string{"ignore"}, + WorkspaceFiles: []string{}, }, UIOptions: UIOptions{ DiagnosticOptions: DiagnosticOptions{ diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 785ebd8b582..8f33bdae96b 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -13,6 +13,7 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/semtok" "golang.org/x/tools/gopls/internal/util/frob" ) @@ -53,6 +54,7 @@ type ClientOptions struct { PreferredContentFormat protocol.MarkupKind LineFoldingOnly bool HierarchicalDocumentSymbolSupport bool + ImportsSource ImportsSourceEnum `status:"experimental"` SemanticTypes []string SemanticMods []string RelatedInformationSupported bool @@ -61,6 +63,9 @@ type ClientOptions struct { 
SupportedResourceOperations []protocol.ResourceOperationKind CodeActionResolveOptions []string ShowDocumentSupported bool + // SupportedWorkDoneProgressFormats specifies the formats supported by the + // client for handling workdone progress metadata. + SupportedWorkDoneProgressFormats map[WorkDoneProgressStyle]bool } // ServerOptions holds LSP-specific configuration that is provided by the @@ -139,6 +144,14 @@ type BuildOptions struct { // // This setting is only supported when gopls is built with Go 1.16 or later. StandaloneTags []string + + // WorkspaceFiles configures the set of globs that match files defining the + // logical build of the current workspace. Any on-disk changes to any files + // matching a glob specified here will trigger a reload of the workspace. + // + // This setting need only be customized in environments with a custom + // GOPACKAGESDRIVER. + WorkspaceFiles []string } // Note: UIOptions must be comparable with reflect.DeepEqual. @@ -170,10 +183,26 @@ type UIOptions struct { SemanticTokens bool `status:"experimental"` // NoSemanticString turns off the sending of the semantic token 'string' + // + // Deprecated: Use SemanticTokenTypes["string"] = false instead. See + // golang/vscode-go#3632 NoSemanticString bool `status:"experimental"` - // NoSemanticNumber turns off the sending of the semantic token 'number' + // NoSemanticNumber turns off the sending of the semantic token 'number' + // + // Deprecated: Use SemanticTokenTypes["number"] = false instead. See + // golang/vscode-go#3632. NoSemanticNumber bool `status:"experimental"` + + // SemanticTokenTypes configures the semantic token types. It allows + // disabling types by setting each value to false. + // By default, all types are enabled. + SemanticTokenTypes map[string]bool `status:"experimental"` + + // SemanticTokenModifiers configures the semantic token modifiers. It allows + // disabling modifiers by setting each value to false. + // By default, all modifiers are enabled. + SemanticTokenModifiers map[string]bool `status:"experimental"` } // A CodeLensSource identifies an (algorithmic) source of code lenses. @@ -561,6 +590,10 @@ func (u *UserOptions) SetEnvSlice(env []string) { } } +type WorkDoneProgressStyle string + +const WorkDoneProgressStyleLog WorkDoneProgressStyle = "log" + // InternalOptions contains settings that are not intended for use by the // average user. These may be settings used by tests or outdated settings that // will soon be deprecated. Some of these settings may not even be configurable @@ -697,6 +730,19 @@ func (s ImportShortcut) ShowDefinition() bool { return s == BothShortcuts || s == DefinitionShortcut } +// ImportsSourceEnum has legal values: +// +// - `off` to disable searching the file system for imports +// - `gopls` to use the metadata graph and module cache index +// - `goimports` for the old behavior, to be deprecated +type ImportsSourceEnum string + +const ( + ImportsSourceOff ImportsSourceEnum = "off" + ImportsSourceGopls = "gopls" + ImportsSourceGoimports = "goimports" +) + type Matcher string const ( @@ -863,6 +909,16 @@ func (o *Options) ForClientCapabilities(clientInfo *protocol.ClientInfo, caps pr if caps.TextDocument.CodeAction.DataSupport && caps.TextDocument.CodeAction.ResolveSupport != nil { o.CodeActionResolveOptions = caps.TextDocument.CodeAction.ResolveSupport.Properties } + + // Client experimental capabilities. 
+ if experimental, ok := caps.Experimental.(map[string]any); ok { + if formats, ok := experimental["progressMessageStyles"].([]any); ok { + o.SupportedWorkDoneProgressFormats = make(map[WorkDoneProgressStyle]bool, len(formats)) + for _, f := range formats { + o.SupportedWorkDoneProgressFormats[WorkDoneProgressStyle(f.(string))] = true + } + } + } } var codec = frob.CodecFor[*Options]() @@ -939,6 +995,8 @@ func (o *Options) setOne(name string, value any) error { } o.DirectoryFilters = filters + case "workspaceFiles": + return setStringSlice(&o.WorkspaceFiles, value) case "completionDocumentation": return setBool(&o.CompletionDocumentation, value) case "usePlaceholders": @@ -949,6 +1007,11 @@ func (o *Options) setOne(name string, value any) error { return setBool(&o.CompleteUnimported, value) case "completionBudget": return setDuration(&o.CompletionBudget, value) + case "importsSource": + return setEnum(&o.ImportsSource, value, + ImportsSourceOff, + ImportsSourceGopls, + ImportsSourceGoimports) case "matcher": return setEnum(&o.Matcher, value, Fuzzy, @@ -1033,9 +1096,7 @@ func (o *Options) setOne(name string, value any) error { o.Codelenses = make(map[CodeLensSource]bool) } o.Codelenses = maps.Clone(o.Codelenses) - for source, enabled := range lensOverrides { - o.Codelenses[source] = enabled - } + maps.Copy(o.Codelenses, lensOverrides) if name == "codelens" { return deprecatedError("codelenses") @@ -1065,11 +1126,24 @@ func (o *Options) setOne(name string, value any) error { case "semanticTokens": return setBool(&o.SemanticTokens, value) + // TODO(hxjiang): deprecate noSemanticString and noSemanticNumber. case "noSemanticString": - return setBool(&o.NoSemanticString, value) + if err := setBool(&o.NoSemanticString, value); err != nil { + return err + } + return &SoftError{fmt.Sprintf("noSemanticString setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being).")} case "noSemanticNumber": - return setBool(&o.NoSemanticNumber, value) + if err := setBool(&o.NoSemanticNumber, value); err != nil { + return err + } + return &SoftError{fmt.Sprintf("noSemanticNumber setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being).")} + + case "semanticTokenTypes": + return setBoolMap(&o.SemanticTokenTypes, value) + + case "semanticTokenModifiers": + return setBoolMap(&o.SemanticTokenModifiers, value) case "expandWorkspaceToModule": // See golang/go#63536: we can consider deprecating @@ -1216,6 +1290,30 @@ func (o *Options) setOne(name string, value any) error { return nil } +// EnabledSemanticTokenModifiers returns a map of modifiers to boolean. +func (o *Options) EnabledSemanticTokenModifiers() map[semtok.Modifier]bool { + copy := make(map[semtok.Modifier]bool, len(o.SemanticTokenModifiers)) + for k, v := range o.SemanticTokenModifiers { + copy[semtok.Modifier(k)] = v + } + return copy +} + +// EnabledSemanticTokenTypes returns a map of types to boolean. +func (o *Options) EnabledSemanticTokenTypes() map[semtok.Type]bool { + copy := make(map[semtok.Type]bool, len(o.SemanticTokenTypes)) + for k, v := range o.SemanticTokenTypes { + copy[semtok.Type(k)] = v + } + if o.NoSemanticString { + copy[semtok.TokString] = false + } + if o.NoSemanticNumber { + copy[semtok.TokNumber] = false + } + return copy +} + // A SoftError is an error that does not affect the functionality of gopls. 
type SoftError struct { msg string diff --git a/gopls/internal/settings/staticcheck.go b/gopls/internal/settings/staticcheck.go index fca3e55f17e..6e06e0b44ea 100644 --- a/gopls/internal/settings/staticcheck.go +++ b/gopls/internal/settings/staticcheck.go @@ -43,9 +43,9 @@ func init() { } StaticcheckAnalyzers[a.Analyzer.Name] = &Analyzer{ - analyzer: a.Analyzer, - enabled: !a.Doc.NonDefault, - severity: mapSeverity(a.Doc.Severity), + analyzer: a.Analyzer, + nonDefault: a.Doc.NonDefault, + severity: mapSeverity(a.Doc.Severity), } } } diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go index 1888267c021..7cb20012657 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -21,7 +21,8 @@ // single ID in the issue body suffices to record the // association. But most problems are exhibited in a variety of // ways, leading to multiple field reports of similar but -// distinct stacks. +// distinct stacks. Hence the following way to associate stacks +// with issues. // // 2. Each GitHub issue body may start with a code block of this form: // @@ -40,8 +41,10 @@ // > | expr && expr // > | expr || expr // -// Each string literal implies a substring match on the stack; +// Each string literal must match complete words on the stack; // the other productions are boolean operations. +// As an example of literal matching, "fu+12" matches "x:fu+12 " +// but not "fu:123" or "snafu+12". // // The stacks command gathers all such predicates out of the // labelled issues and evaluates each one against each new stack. @@ -74,7 +77,9 @@ import ( "net/url" "os" "os/exec" + "path" "path/filepath" + "regexp" "runtime" "sort" "strconv" @@ -82,24 +87,102 @@ import ( "time" "unicode" + "golang.org/x/mod/semver" "golang.org/x/sys/unix" "golang.org/x/telemetry" "golang.org/x/tools/gopls/internal/util/browser" "golang.org/x/tools/gopls/internal/util/moremaps" + "golang.org/x/tools/gopls/internal/util/morestrings" ) // flags var ( + programFlag = flag.String("program", "golang.org/x/tools/gopls", "Package path of program to process") + daysFlag = flag.Int("days", 7, "number of previous days of telemetry data to read") - authToken string // mandatory GitHub authentication token (for R/W issues access) + dryRun = flag.Bool("n", false, "dry run, avoid updating issues") ) +// ProgramConfig is the configuration for processing reports for a specific +// program. +type ProgramConfig struct { + // Program is the package path of the program to process. + Program string + + // IncludeClient indicates that stack Info should include gopls/client metadata. + IncludeClient bool + + // SearchLabel is the GitHub label used to find all existing reports. + SearchLabel string + + // NewIssuePrefix is the package prefix to apply to new issue titles. + NewIssuePrefix string + + // NewIssueLabels are the labels to apply to new issues. + NewIssueLabels []string + + // MatchSymbolPrefix is the prefix of "interesting" symbol names. + // + // A given stack will be "blamed" on the deepest symbol in the stack that: + // 1. Matches MatchSymbolPrefix + // 2. Is an exported function or any method on an exported Type. + // 3. Does _not_ match IgnoreSymbolContains. + MatchSymbolPrefix string + + // IgnoreSymbolContains are "uninteresting" symbol substrings. e.g., + // logging packages. 
+ IgnoreSymbolContains []string +} + +var programs = map[string]ProgramConfig{ + "golang.org/x/tools/gopls": { + Program: "golang.org/x/tools/gopls", + IncludeClient: true, + SearchLabel: "gopls/telemetry-wins", + NewIssuePrefix: "x/tools/gopls", + NewIssueLabels: []string{ + "gopls", + "Tools", + "gopls/telemetry-wins", + "NeedsInvestigation", + }, + MatchSymbolPrefix: "golang.org/x/tools/gopls/", + IgnoreSymbolContains: []string{ + "internal/util/bug.", + "internal/bug.", // former name in gopls/0.14.2 + }, + }, + "cmd/compile": { + Program: "cmd/compile", + SearchLabel: "compiler/telemetry-wins", + NewIssuePrefix: "cmd/compile", + NewIssueLabels: []string{ + "compiler/runtime", + "compiler/telemetry-wins", + "NeedsInvestigation", + }, + MatchSymbolPrefix: "cmd/compile", + IgnoreSymbolContains: []string{ + // Various "fatal" wrappers. + "Fatal", // base.Fatal*, ssa.Value.Fatal*, etc. + "cmd/compile/internal/base.Assert", + "cmd/compile/internal/noder.assert", + "cmd/compile/internal/ssa.Compile.func1", // basically a Fatalf wrapper. + // Panic recovery. + "cmd/compile/internal/types2.(*Checker).handleBailout", + "cmd/compile/internal/gc.handlePanic", + }, + }, +} + func main() { log.SetFlags(0) log.SetPrefix("stacks: ") flag.Parse() + var ghclient *githubClient + // Read GitHub authentication token from $HOME/.stacks.token. // // You can create one using the flow at: GitHub > You > Settings > @@ -119,34 +202,137 @@ func main() { tokenFile := filepath.Join(home, ".stacks.token") content, err := os.ReadFile(tokenFile) if err != nil { - if !os.IsNotExist(err) { - log.Fatalf("cannot read GitHub authentication token: %v", err) + log.Fatalf("cannot read GitHub authentication token: %v", err) + } + ghclient = &githubClient{authToken: string(bytes.TrimSpace(content))} + } + + pcfg, ok := programs[*programFlag] + if !ok { + log.Fatalf("unknown -program %s", *programFlag) + } + + // Read all recent telemetry reports. + stacks, distinctStacks, stackToURL, err := readReports(pcfg, *daysFlag) + if err != nil { + log.Fatalf("Error reading reports: %v", err) + } + + issues, err := readIssues(ghclient, pcfg) + if err != nil { + log.Fatalf("Error reading issues: %v", err) + } + + // Map stacks to existing issues (if any). + claimedBy := claimStacks(issues, stacks) + + // Update existing issues that claimed new stacks. + updateIssues(ghclient, issues, stacks, stackToURL) + + // For each stack, show existing issue or create a new one. + // Aggregate stack IDs by issue summary. + var ( + // Both vars map the summary line to the stack count. + existingIssues = make(map[string]int64) + newIssues = make(map[string]int64) + ) + for stack, counts := range stacks { + id := stackID(stack) + + var total int64 + for _, count := range counts { + total += count + } + + if issue, ok := claimedBy[id]; ok { + // existing issue, already updated above, just store + // the summary. + state := issue.State + if issue.State == "closed" && issue.StateReason == "completed" { + state = "completed" + } + summary := fmt.Sprintf("#%d: %s [%s]", + issue.Number, issue.Title, state) + if state == "completed" && issue.Milestone != nil { + summary += " milestone " + strings.TrimPrefix(issue.Milestone.Title, "gopls/") } - log.Fatalf("no file %s containing GitHub authentication token.", tokenFile) + existingIssues[summary] += total + } else { + // new issue, need to create GitHub issue and store + // summary. 
+ title := newIssue(pcfg, stack, id, stackToURL[stack], counts) + summary := fmt.Sprintf("%s: %s [%s]", id, title, "new") + newIssues[summary] += total } - authToken = string(bytes.TrimSpace(content)) } - // Maps stack text to Info to count. - stacks := make(map[string]map[Info]int64) - var distinctStacks int + fmt.Printf("Found %d distinct stacks in last %v days:\n", distinctStacks, *daysFlag) + print := func(caption string, issues map[string]int64) { + // Print items in descending frequency. + keys := moremaps.KeySlice(issues) + sort.Slice(keys, func(i, j int) bool { + return issues[keys[i]] > issues[keys[j]] + }) + fmt.Printf("%s issues:\n", caption) + for _, summary := range keys { + count := issues[summary] + // Show closed issues in "white". + if isTerminal(os.Stdout) && (strings.Contains(summary, "[closed]") || strings.Contains(summary, "[completed]")) { + // ESC + "[" + n + "m" => change color to n + // (37 = white, 0 = default) + summary = "\x1B[37m" + summary + "\x1B[0m" + } + fmt.Printf("%s (n=%d)\n", summary, count) + } + } + print("Existing", existingIssues) + print("New", newIssues) +} + +// Info is used as a key for de-duping and aggregating. +// Do not add detail about particular records (e.g. data, telemetry URL). +type Info struct { + Program string // "golang.org/x/tools/gopls" + ProgramVersion string // "v0.16.1" + GoVersion string // "go1.23" + GOOS, GOARCH string + GoplsClient string // e.g. "vscode" (only set if Program == "golang.org/x/tools/gopls") +} - // Maps stack to a telemetry URL. - stackToURL := make(map[string]string) +func (info Info) String() string { + s := fmt.Sprintf("%s@%s %s %s/%s", + info.Program, info.ProgramVersion, + info.GoVersion, info.GOOS, info.GOARCH) + if info.GoplsClient != "" { + s += " " + info.GoplsClient + } + return s +} + +// readReports downloads telemetry stack reports for a program from the +// specified number of most recent days. +// +// stacks is a map of stack text to program metadata to stack+metadata report +// count. +// distinctStacks is the number of distinct stacks across all reports. +// stackToURL maps the stack text to the oldest telemetry JSON report it was +// included in. +func readReports(pcfg ProgramConfig, days int) (stacks map[string]map[Info]int64, distinctStacks int, stackToURL map[string]string, err error) { + stacks = make(map[string]map[Info]int64) + stackToURL = make(map[string]string) - // Read all recent telemetry reports. t := time.Now() - for i := 0; i < *daysFlag; i++ { + for i := range days { date := t.Add(-time.Duration(i+1) * 24 * time.Hour).Format(time.DateOnly) url := fmt.Sprintf("https://storage.googleapis.com/prod-telemetry-merged/%s.json", date) resp, err := http.Get(url) if err != nil { - log.Fatalf("can't GET %s: %v", url, err) + return nil, 0, nil, fmt.Errorf("error on GET %s: %v", url, err) } defer resp.Body.Close() if resp.StatusCode != 200 { - log.Fatalf("GET %s returned %d %s", url, resp.StatusCode, resp.Status) + return nil, 0, nil, fmt.Errorf("GET %s returned %d %s", url, resp.StatusCode, resp.Status) } dec := json.NewDecoder(resp.Body) @@ -156,16 +342,28 @@ func main() { if err == io.EOF { break } - log.Fatal(err) + return nil, 0, nil, fmt.Errorf("error decoding report: %v", err) } for _, prog := range report.Programs { - if prog.Program == "golang.org/x/tools/gopls" && len(prog.Stacks) > 0 { - // Include applicable client names (e.g. vscode, eglot). 
+ if prog.Program != pcfg.Program { + continue + } + if len(prog.Stacks) == 0 { + continue + } + // Ignore @devel versions as they correspond to + // ephemeral (and often numerous) variations of + // the program as we work on a fix to a bug. + if prog.Version == "devel" { + continue + } + + // Include applicable client names (e.g. vscode, eglot) for gopls. + var clientSuffix string + if pcfg.IncludeClient { var clients []string - var clientSuffix string for key := range prog.Counters { - client := strings.TrimPrefix(key, "gopls/client:") - if client != key { + if client, ok := strings.CutPrefix(key, "gopls/client:"); ok { clients = append(clients, client) } } @@ -173,147 +371,176 @@ func main() { if len(clients) > 0 { clientSuffix = strings.Join(clients, ",") } + } - // Ignore @devel versions as they correspond to - // ephemeral (and often numerous) variations of - // the program as we work on a fix to a bug. - if prog.Version == "devel" { - continue - } - - distinctStacks++ - - info := Info{ - Program: prog.Program, - Version: prog.Version, - GoVersion: prog.GoVersion, - GOOS: prog.GOOS, - GOARCH: prog.GOARCH, - Client: clientSuffix, - } - for stack, count := range prog.Stacks { - counts := stacks[stack] - if counts == nil { - counts = make(map[Info]int64) - stacks[stack] = counts - } - counts[info] += count - stackToURL[stack] = url + info := Info{ + Program: prog.Program, + ProgramVersion: prog.Version, + GoVersion: prog.GoVersion, + GOOS: prog.GOOS, + GOARCH: prog.GOARCH, + GoplsClient: clientSuffix, + } + for stack, count := range prog.Stacks { + counts := stacks[stack] + if counts == nil { + counts = make(map[Info]int64) + stacks[stack] = counts } + counts[info] += count + stackToURL[stack] = url } + distinctStacks += len(prog.Stacks) } } } - // Query GitHub for all existing GitHub issues with label:gopls/telemetry-wins. - // - // TODO(adonovan): by default GitHub returns at most 30 - // issues; we have lifted this to 100 using per_page=%d, but - // that won't work forever; use paging. - const query = "is:issue label:gopls/telemetry-wins" - res, err := searchIssues(query) + return stacks, distinctStacks, stackToURL, nil +} + +// readIssues returns all existing issues for the given program and parses any +// predicates. +func readIssues(cli *githubClient, pcfg ProgramConfig) ([]*Issue, error) { + // Query GitHub for all existing GitHub issues with the report label. + issues, err := cli.searchIssues(pcfg.SearchLabel) if err != nil { - log.Fatalf("GitHub issues query %q failed: %v", query, err) + // TODO(jba): return error instead of dying, or doc. + log.Fatalf("GitHub issues label %q search failed: %v", pcfg.SearchLabel, err) } // Extract and validate predicate expressions in ```#!stacks...``` code blocks. // See the package doc comment for the grammar. 
- for _, issue := range res.Items { + for _, issue := range issues { block := findPredicateBlock(issue.Body) if block != "" { - expr, err := parser.ParseExpr(block) + pred, err := parsePredicate(block) if err != nil { log.Printf("invalid predicate in issue #%d: %v\n<<%s>>", issue.Number, err, block) continue } - var validate func(ast.Expr) error - validate = func(e ast.Expr) error { - switch e := e.(type) { - case *ast.UnaryExpr: - if e.Op != token.NOT { - return fmt.Errorf("invalid op: %s", e.Op) - } - return validate(e.X) + issue.predicate = pred + } + } - case *ast.BinaryExpr: - if e.Op != token.LAND && e.Op != token.LOR { - return fmt.Errorf("invalid op: %s", e.Op) - } - if err := validate(e.X); err != nil { - return err - } - return validate(e.Y) + return issues, nil +} - case *ast.ParenExpr: - return validate(e.X) +// parsePredicate parses a predicate expression, returning a function that evaluates +// the predicate on a stack. +// The expression must match this grammar: +// +// expr = "string literal" +// | ( expr ) +// | ! expr +// | expr && expr +// | expr || expr +// +// The value of a string literal is whether it is a substring of the stack, respecting word boundaries. +// That is, a literal L behaves like the regular expression \bL'\b, where L' is L with +// regexp metacharacters quoted. +func parsePredicate(s string) (func(string) bool, error) { + expr, err := parser.ParseExpr(s) + if err != nil { + return nil, fmt.Errorf("parse error: %w", err) + } - case *ast.BasicLit: - if e.Kind != token.STRING { - return fmt.Errorf("invalid literal (%s)", e.Kind) - } - if _, err := strconv.Unquote(e.Value); err != nil { - return err - } + // Cache compiled regexps since we need them more than once. + literalRegexps := make(map[*ast.BasicLit]*regexp.Regexp) + + // Check for errors in the predicate so we can report them now, + // ensuring that evaluation is error-free. + var validate func(ast.Expr) error + validate = func(e ast.Expr) error { + switch e := e.(type) { + case *ast.UnaryExpr: + if e.Op != token.NOT { + return fmt.Errorf("invalid op: %s", e.Op) + } + return validate(e.X) - default: - return fmt.Errorf("syntax error (%T)", e) - } - return nil + case *ast.BinaryExpr: + if e.Op != token.LAND && e.Op != token.LOR { + return fmt.Errorf("invalid op: %s", e.Op) } - if err := validate(expr); err != nil { - log.Printf("invalid predicate in issue #%d: %v\n<<%s>>", - issue.Number, err, block) - continue + if err := validate(e.X); err != nil { + return err } - issue.predicateText = block - issue.predicate = func(stack string) bool { - var eval func(ast.Expr) bool - eval = func(e ast.Expr) bool { - switch e := e.(type) { - case *ast.UnaryExpr: - return !eval(e.X) - - case *ast.BinaryExpr: - if e.Op == token.LAND { - return eval(e.X) && eval(e.Y) - } else { - return eval(e.X) || eval(e.Y) - } + return validate(e.Y) - case *ast.ParenExpr: - return eval(e.X) + case *ast.ParenExpr: + return validate(e.X) - case *ast.BasicLit: - substr, _ := strconv.Unquote(e.Value) - return strings.Contains(stack, substr) - } - panic("unreachable") - } - return eval(expr) + case *ast.BasicLit: + if e.Kind != token.STRING { + return fmt.Errorf("invalid literal (%s)", e.Kind) + } + lit, err := strconv.Unquote(e.Value) + if err != nil { + return err } + // The literal should match complete words. It may match multiple words, + // if it contains non-word runes like whitespace; but it must match word + // boundaries at each end. + // The constructed regular expression is always valid. 
+ literalRegexps[e] = regexp.MustCompile(`\b` + regexp.QuoteMeta(lit) + `\b`) + + default: + return fmt.Errorf("syntax error (%T)", e) } + return nil + } + if err := validate(expr); err != nil { + return nil, err } - // Map each stack ID to its issue. - // - // An issue can claim a stack two ways: - // - // 1. if the issue body contains the ID of the stack. Matching - // is a little loose but base64 will rarely produce words - // that appear in the body by chance. - // - // 2. if the issue body contains a ```#!stacks``` predicate - // that matches the stack. - // - // We report an error if two different issues attempt to claim - // the same stack. - // + return func(stack string) bool { + var eval func(ast.Expr) bool + eval = func(e ast.Expr) bool { + switch e := e.(type) { + case *ast.UnaryExpr: + return !eval(e.X) + + case *ast.BinaryExpr: + if e.Op == token.LAND { + return eval(e.X) && eval(e.Y) + } else { + return eval(e.X) || eval(e.Y) + } + + case *ast.ParenExpr: + return eval(e.X) + + case *ast.BasicLit: + return literalRegexps[e].MatchString(stack) + } + panic("unreachable") + } + return eval(expr) + }, nil +} + +// claimStack maps each stack ID to its issue (if any). +// +// It returns a map of stack text to the issue that claimed it. +// +// An issue can claim a stack two ways: +// +// 1. if the issue body contains the ID of the stack. Matching +// is a little loose but base64 will rarely produce words +// that appear in the body by chance. +// +// 2. if the issue body contains a ```#!stacks``` predicate +// that matches the stack. +// +// We log an error if two different issues attempt to claim +// the same stack. +func claimStacks(issues []*Issue, stacks map[string]map[Info]int64) map[string]*Issue { // This is O(new stacks x existing issues). claimedBy := make(map[string]*Issue) for stack := range stacks { id := stackID(stack) - for _, issue := range res.Items { + for _, issue := range issues { byPredicate := false if strings.Contains(issue.Body, id) { // nop @@ -341,36 +568,12 @@ func main() { } } - // For each stack, show existing issue or create a new one. - // Aggregate stack IDs by issue summary. - var ( - // Both vars map the summary line to the stack count. - existingIssues = make(map[string]int64) - newIssues = make(map[string]int64) - ) - for stack, counts := range stacks { - id := stackID(stack) - - var total int64 - for _, count := range counts { - total += count - } - - if issue, ok := claimedBy[id]; ok { - // existing issue - summary := fmt.Sprintf("#%d: %s [%s]", - issue.Number, issue.Title, issue.State) - existingIssues[summary] += total - } else { - // new issue - title := newIssue(stack, id, stackToURL[stack], counts) - summary := fmt.Sprintf("%s: %s [%s]", id, title, "new") - newIssues[summary] += total - } - } + return claimedBy +} - // Update existing issues that claimed new stacks by predicate. - for _, issue := range res.Items { +// updateIssues updates existing issues that claimed new stacks by predicate. 
+func updateIssues(cli *githubClient, issues []*Issue, stacks map[string]map[Info]int64, stackToURL map[string]string) { + for _, issue := range issues { if len(issue.newStacks) == 0 { continue } @@ -384,7 +587,8 @@ func main() { newStackIDs = append(newStackIDs, id) writeStackComment(comment, stack, id, stackToURL[stack], stacks[stack]) } - if err := addIssueComment(issue.Number, comment.String()); err != nil { + + if err := cli.addIssueComment(issue.Number, comment.String()); err != nil { log.Println(err) continue } @@ -397,52 +601,101 @@ func main() { body += "\nDups:" } body += " " + strings.Join(newStackIDs, " ") - if err := updateIssueBody(issue.Number, body); err != nil { - log.Printf("added comment to issue #%d but failed to update body: %v", + + update := updateIssue{number: issue.Number, Body: body} + if shouldReopen(issue, stacks) { + update.State = "open" + update.StateReason = "reopened" + } + if err := cli.updateIssue(update); err != nil { + log.Printf("added comment to issue #%d but failed to update: %v", issue.Number, err) continue } log.Printf("added stacks %s to issue #%d", newStackIDs, issue.Number) } +} - fmt.Printf("Found %d distinct stacks in last %v days:\n", distinctStacks, *daysFlag) - print := func(caption string, issues map[string]int64) { - // Print items in descending frequency. - keys := moremaps.KeySlice(issues) - sort.Slice(keys, func(i, j int) bool { - return issues[keys[i]] > issues[keys[j]] - }) - fmt.Printf("%s issues:\n", caption) - for _, summary := range keys { - count := issues[summary] - // Show closed issues in "white". - if isTerminal(os.Stdout) && strings.Contains(summary, "[closed]") { - // ESC + "[" + n + "m" => change color to n - // (37 = white, 0 = default) - summary = "\x1B[37m" + summary + "\x1B[0m" +// An issue should be re-opened if it was closed as fixed, and at least one of the +// new stacks happened since the version containing the fix. +func shouldReopen(issue *Issue, stacks map[string]map[Info]int64) bool { + if !issue.isFixed() { + return false + } + issueProgram, issueVersion, ok := parseMilestone(issue.Milestone) + if !ok { + return false + } + + matchProgram := func(infoProg string) bool { + switch issueProgram { + case "gopls": + return path.Base(infoProg) == issueProgram + case "go": + // At present, we only care about compiler stacks. + // Issues should have milestones like "Go1.24". + return infoProg == "cmd/compile" + default: + return false + } + } + + for _, stack := range issue.newStacks { + for info := range stacks[stack] { + if matchProgram(info.Program) && semver.Compare(semVer(info.ProgramVersion), issueVersion) >= 0 { + log.Printf("reopening issue #%d: purportedly fixed in %s@%s, but found a new stack from version %s", + issue.Number, issueProgram, issueVersion, info.ProgramVersion) + return true } - fmt.Printf("%s (n=%d)\n", summary, count) } } - print("Existing", existingIssues) - print("New", newIssues) + return false } -// Info is used as a key for de-duping and aggregating. -// Do not add detail about particular records (e.g. data, telemetry URL). -type Info struct { - Program string // "golang.org/x/tools/gopls" - Version, GoVersion string // e.g. "gopls/v0.16.1", "go1.23" - GOOS, GOARCH string - Client string // e.g. "vscode" +// An issue is fixed if it was closed because it was completed. 
+func (i *Issue) isFixed() bool {
+	return i.State == "closed" && i.StateReason == "completed"
 }
 
-func (info Info) String() string {
-	return fmt.Sprintf("%s@%s %s %s/%s %s",
-		info.Program, info.Version,
-		info.GoVersion, info.GOOS, info.GOARCH,
-		info.Client)
+// parseMilestone parses the title of a GitHub milestone.
+// If it is in the format PROGRAM/VERSION (for example, "gopls/v0.17.0"),
+// then it returns PROGRAM and VERSION.
+// If it is in the format Go1.X, then it returns "go" as the program and
+// "v1.X" or "v1.X.0" as the version.
+// Otherwise, the last return value is false.
+func parseMilestone(m *Milestone) (program, version string, ok bool) {
+	if m == nil {
+		return "", "", false
+	}
+	if strings.HasPrefix(m.Title, "Go") {
+		v := semVer(m.Title)
+		if !semver.IsValid(v) {
+			return "", "", false
+		}
+		return "go", v, true
+	}
+	program, version, ok = morestrings.CutLast(m.Title, "/")
+	if !ok || program == "" || version == "" || version[0] != 'v' {
+		return "", "", false
+	}
+	return program, version, true
+}
+
+// semVer returns a semantic version for its argument, which may already be
+// a semantic version, or may be a Go version.
+//
+//	v1.2.3 => v1.2.3
+//	go1.24 => v1.24
+//	Go1.23.5 => v1.23.5
+//	goHome => vHome
+//
+// It does not validate the result; callers use semver.IsValid to reject versions in the wrong format.
+func semVer(v string) string {
+	if strings.HasPrefix(v, "go") || strings.HasPrefix(v, "Go") {
+		return "v" + v[2:]
+	}
+	return v
 }
 
 // stackID returns a 32-bit identifier for a stack
@@ -469,24 +722,27 @@ func stackID(stack string) string {
 // manually de-dup the issue before deciding whether to submit the form.)
 //
 // It returns the title.
-func newIssue(stack, id string, jsonURL string, counts map[Info]int64) string {
-	// Use a heuristic to find a suitable symbol to blame
-	// in the title: the first public function or method
-	// of a public type, in gopls, to appear in the stack
-	// trace. We can always refine it later.
+func newIssue(pcfg ProgramConfig, stack, id, jsonURL string, counts map[Info]int64) string {
+	// Use a heuristic to find a suitable symbol to blame in the title: the
+	// first public function or method of a public type, in
+	// MatchSymbolPrefix, to appear in the stack trace. We can always
+	// refine it later.
 	//
 	// TODO(adonovan): include in the issue a source snippet ±5
 	// lines around the PC in this symbol.
var symbol string +outer: for _, line := range strings.Split(stack, "\n") { - // Look for: - // gopls/.../pkg.Func - // gopls/.../pkg.Type.method - // gopls/.../pkg.(*Type).method - if strings.Contains(line, "internal/util/bug.") { - continue // not interesting + for _, s := range pcfg.IgnoreSymbolContains { + if strings.Contains(line, s) { + continue outer // not interesting + } } - if _, rest, ok := strings.Cut(line, "golang.org/x/tools/gopls/"); ok { + // Look for: + // pcfg.MatchSymbolPrefix/.../pkg.Func + // pcfg.MatchSymbolPrefix/.../pkg.Type.method + // pcfg.MatchSymbolPrefix/.../pkg.(*Type).method + if _, rest, ok := strings.Cut(line, pcfg.MatchSymbolPrefix); ok { if i := strings.IndexByte(rest, '.'); i >= 0 { rest = rest[i+1:] rest = strings.TrimPrefix(rest, "(*") @@ -500,7 +756,7 @@ func newIssue(stack, id string, jsonURL string, counts map[Info]int64) string { } // Populate the form (title, body, label) - title := fmt.Sprintf("x/tools/gopls: bug in %s", symbol) + title := fmt.Sprintf("%s: bug in %s", pcfg.NewIssuePrefix, symbol) body := new(bytes.Buffer) @@ -513,7 +769,7 @@ func newIssue(stack, id string, jsonURL string, counts map[Info]int64) string { writeStackComment(body, stack, id, jsonURL, counts) - const labels = "gopls,Tools,gopls/telemetry-wins,NeedsInvestigation" + labels := strings.Join(pcfg.NewIssueLabels, ",") // Report it. The user will interactively finish the task, // since they will typically de-dup it without even creating a new issue @@ -543,7 +799,7 @@ func writeStackComment(body *bytes.Buffer, stack, id string, jsonURL string, cou id, jsonURL) // Read the mapping from symbols to file/line. - pclntab, err := readPCLineTable(info) + pclntab, err := readPCLineTable(info, defaultStacksDir) if err != nil { log.Fatal(err) } @@ -631,7 +887,7 @@ func frameURL(pclntab map[string]FileLine, info Info, frame string) string { } return fmt.Sprintf("https://cs.opensource.google/go/x/tools/+/%s:%s;l=%d", - "gopls/"+info.Version, rest, linenum) + "gopls/"+info.ProgramVersion, rest, linenum) } // other x/ module dependency? @@ -650,65 +906,123 @@ func frameURL(pclntab map[string]FileLine, info Info, frame string) string { return "" } +// -- GitHub client -- + +// A githubClient interacts with GitHub. +// During testing, updates to GitHub are saved in changes instead of being applied. +// Reads from GitHub occur normally. +type githubClient struct { + authToken string // mandatory GitHub authentication token (for R/W issues access) + divertChanges bool // divert attempted GitHub changes to the changes field instead of executing them + changes []any // slice of (addIssueComment | updateIssueBody) +} + +func (cli *githubClient) takeChanges() []any { + r := cli.changes + cli.changes = nil + return r +} + +// addIssueComment is a change for creating a comment on an issue. +type addIssueComment struct { + number int + comment string +} + +// updateIssue is a change for modifying an existing issue. +// It includes the issue number and the fields that can be updated on a GitHub issue. +// A JSON-marshaled updateIssue can be used as the body of the update request sent to GitHub. +// See https://docs.github.com/en/rest/issues/issues?apiVersion=2022-11-28#update-an-issue. 
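// For example (a sketch of the marshaled request body; compare
// TestMarshalUpdateIssueFields):
//
//	update := updateIssue{number: 123, State: "open", StateReason: "reopened"}
//	data, _ := json.Marshal(update)
//	// string(data) == `{"state":"open","state_reason":"reopened"}`
//
// The unexported number selects the endpoint URL, and empty fields are
// omitted from the body, so an update changes only what is set.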
+type updateIssue struct { + number int // issue number; must be unexported + Body string `json:"body,omitempty"` + State string `json:"state,omitempty"` // "open" or "closed" + StateReason string `json:"state_reason,omitempty"` // "completed", "not_planned", "reopened" +} + // -- GitHub search -- // searchIssues queries the GitHub issue tracker. -func searchIssues(query string) (*IssuesSearchResult, error) { - q := url.QueryEscape(query) +func (cli *githubClient) searchIssues(label string) ([]*Issue, error) { + label = url.QueryEscape(label) - req, err := http.NewRequest("GET", "https://api.github.com/search/issues?q="+q+"&per_page=100", nil) - if err != nil { - return nil, err - } - req.Header.Add("Authorization", "Bearer "+authToken) - resp, err := http.DefaultClient.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("search query failed: %s (body: %s)", resp.Status, body) + // Slurp all issues with the telemetry label. + // + // The pagination link headers have an annoying format, but ultimately + // are just ?page=1, ?page=2, etc with no extra state. So just keep + // trying new pages until we get no more results. + // + // NOTE: With this scheme, GitHub clearly has no protection against + // race conditions, so presumably we could get duplicate issues or miss + // issues across pages. + + getPage := func(page int) ([]*Issue, error) { + url := fmt.Sprintf("https://api.github.com/repos/golang/go/issues?state=all&labels=%s&per_page=100&page=%d", label, page) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, err + } + req.Header.Add("Authorization", "Bearer "+cli.authToken) + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("search query %s failed: %s (body: %s)", url, resp.Status, body) + } + var r []*Issue + if err := json.NewDecoder(resp.Body).Decode(&r); err != nil { + return nil, err + } + + return r, nil } - var result IssuesSearchResult - if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { - return nil, err + + var results []*Issue + for page := 1; ; page++ { + r, err := getPage(page) + if err != nil { + return nil, err + } + if len(r) == 0 { + // No more results. + break + } + + results = append(results, r...) } - return &result, nil + + return results, nil } -// updateIssueBody updates the body of the numbered issue. -func updateIssueBody(number int, body string) error { - // https://docs.github.com/en/rest/issues/comments#update-an-issue - var payload struct { - Body string `json:"body"` - } - payload.Body = body - data, err := json.Marshal(payload) - if err != nil { - return err +// updateIssue updates the numbered issue. 
+func (cli *githubClient) updateIssue(update updateIssue) error { + if cli.divertChanges { + cli.changes = append(cli.changes, update) + return nil } - url := fmt.Sprintf("https://api.github.com/repos/golang/go/issues/%d", number) - req, err := http.NewRequest("PATCH", url, bytes.NewReader(data)) - if err != nil { - return err - } - req.Header.Add("Authorization", "Bearer "+authToken) - resp, err := http.DefaultClient.Do(req) + data, err := json.Marshal(update) if err != nil { return err } - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return fmt.Errorf("issue update failed: %s (body: %s)", resp.Status, body) + + url := fmt.Sprintf("https://api.github.com/repos/golang/go/issues/%d", update.number) + if err := cli.requestChange("PATCH", url, data, http.StatusOK); err != nil { + return fmt.Errorf("updating issue: %v", err) } return nil } // addIssueComment adds a markdown comment to the numbered issue. -func addIssueComment(number int, comment string) error { +func (cli *githubClient) addIssueComment(number int, comment string) error { + if cli.divertChanges { + cli.changes = append(cli.changes, addIssueComment{number, comment}) + return nil + } + // https://docs.github.com/en/rest/issues/comments#create-an-issue-comment var payload struct { Body string `json:"body"` @@ -720,42 +1034,54 @@ func addIssueComment(number int, comment string) error { } url := fmt.Sprintf("https://api.github.com/repos/golang/go/issues/%d/comments", number) - req, err := http.NewRequest("POST", url, bytes.NewReader(data)) + if err := cli.requestChange("POST", url, data, http.StatusCreated); err != nil { + return fmt.Errorf("creating issue comment: %v", err) + } + return nil +} + +// requestChange sends a request to url using method, which may change the state at the server. +// The data is sent as the request body, and wantStatus is the expected response status code. +func (cli *githubClient) requestChange(method, url string, data []byte, wantStatus int) error { + if *dryRun { + log.Printf("DRY RUN: %s %s", method, url) + return nil + } + req, err := http.NewRequest(method, url, bytes.NewReader(data)) if err != nil { return err } - req.Header.Add("Authorization", "Bearer "+authToken) + req.Header.Add("Authorization", "Bearer "+cli.authToken) resp, err := http.DefaultClient.Do(req) if err != nil { return err } defer resp.Body.Close() - if resp.StatusCode != http.StatusCreated { + if resp.StatusCode != wantStatus { body, _ := io.ReadAll(resp.Body) - return fmt.Errorf("failed to create issue comment: %s (body: %s)", resp.Status, body) + return fmt.Errorf("request failed: %s (body: %s)", resp.Status, body) } return nil } -// See https://developer.github.com/v3/search/#search-issues. - -type IssuesSearchResult struct { - TotalCount int `json:"total_count"` - Items []*Issue -} +// See https://docs.github.com/en/rest/issues/issues?apiVersion=2022-11-28#list-repository-issues. 
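// Usage sketch (values are hypothetical; see TestUpdateIssues for the real
// pattern): with divertChanges set, writes are recorded in cli.changes
// instead of being sent to GitHub, and when the dry-run flag is set
// requestChange only logs the request.
func exampleDivertedClient() []any {
	cli := &githubClient{authToken: "fake-token", divertChanges: true}
	_ = cli.addIssueComment(123, "example comment")                     // recorded, not sent
	_ = cli.updateIssue(updateIssue{number: 123, Body: "example body"}) // recorded, not sent
	return cli.takeChanges() // [addIssueComment{...} updateIssue{...}]
}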
type Issue struct { - Number int - HTMLURL string `json:"html_url"` - Title string - State string - User *User - CreatedAt time.Time `json:"created_at"` - Body string // in Markdown format - - predicateText string // text of ```#!stacks...``` predicate block - predicate func(string) bool // matching predicate over stack text - newStacks []string // new stacks to add to existing issue (comments and IDs) + Number int + HTMLURL string `json:"html_url"` + Title string + State string + StateReason string `json:"state_reason"` + User *User + CreatedAt time.Time `json:"created_at"` + Body string // in Markdown format + Milestone *Milestone + + // Set by readIssues. + predicate func(string) bool // matching predicate over stack text + + // Set by claimIssues. + newStacks []string // new stacks to add to existing issue (comments and IDs) } type User struct { @@ -763,6 +1089,10 @@ type User struct { HTMLURL string `json:"html_url"` } +type Milestone struct { + Title string +} + // -- pclntab -- type FileLine struct { @@ -770,65 +1100,100 @@ type FileLine struct { line int } +const defaultStacksDir = "/tmp/stacks-cache" + // readPCLineTable builds the gopls executable specified by info, // reads its PC-to-line-number table, and returns the file/line of // each TEXT symbol. -func readPCLineTable(info Info) (map[string]FileLine, error) { +// +// stacksDir is a semi-durable temp directory (i.e. lasts for at least a few +// hours) to hold recent sources and executables. +func readPCLineTable(info Info, stacksDir string) (map[string]FileLine, error) { // The stacks dir will be a semi-durable temp directory // (i.e. lasts for at least hours) holding source trees // and executables we have built recently. // // Each subdir will hold a specific revision. - stacksDir := "/tmp/gopls-stacks" if err := os.MkdirAll(stacksDir, 0777); err != nil { return nil, fmt.Errorf("can't create stacks dir: %v", err) } - // Fetch the source for the tools repo, - // shallow-cloning just the desired revision. - // (Skip if it's already cloned.) - revDir := filepath.Join(stacksDir, info.Version) - if !fileExists(filepath.Join(revDir, "go.mod")) { - // We check for presence of the go.mod file, - // not just the directory itself, as the /tmp reaper - // often removes stale files before removing their directories. - // Remove those stale directories now. - _ = os.RemoveAll(revDir) // ignore errors - - log.Printf("cloning tools@gopls/%s", info.Version) - if err := shallowClone(revDir, "https://go.googlesource.com/tools", "gopls/"+info.Version); err != nil { + // When building a subrepo tool, we must clone the source of the + // subrepo, and run go build from that checkout. + // + // When building a main repo tool, no need to clone or change + // directories. GOTOOLCHAIN is sufficient to fetch and build the + // appropriate version. + var buildDir string + switch info.Program { + case "golang.org/x/tools/gopls": + // Fetch the source for the tools repo, + // shallow-cloning just the desired revision. + // (Skip if it's already cloned.) + revDir := filepath.Join(stacksDir, info.ProgramVersion) + if !fileExists(filepath.Join(revDir, "go.mod")) { + // We check for presence of the go.mod file, + // not just the directory itself, as the /tmp reaper + // often removes stale files before removing their directories. + // Remove those stale directories now. _ = os.RemoveAll(revDir) // ignore errors - return nil, fmt.Errorf("clone: %v", err) + + // TODO(prattmic): Consider using ProgramConfig + // configuration if we add more configurations. 
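			// In effect (a sketch: shallowClone is defined elsewhere in this
			// file, and the exact git flags and versions shown here are
			// assumptions), this step amounts to roughly
			//
			//	git clone --depth=1 --branch gopls/v0.16.1 \
			//		https://go.googlesource.com/tools <revDir>
			//
			// after which the build below runs from <revDir>/gopls with
			// GOTOOLCHAIN, GOOS, and GOARCH taken from the Info record.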
+ log.Printf("cloning tools@gopls/%s", info.ProgramVersion) + if err := shallowClone(revDir, "https://go.googlesource.com/tools", "gopls/"+info.ProgramVersion); err != nil { + _ = os.RemoveAll(revDir) // ignore errors + return nil, fmt.Errorf("clone: %v", err) + } } + + // gopls is in its own module, we must build from there. + buildDir = filepath.Join(revDir, "gopls") + case "cmd/compile": + // Nothing to do, GOTOOLCHAIN is sufficient. + + // Switch build directories so if we happen to be in Go module + // directory its go.mod doesn't restrict the toolchain versions + // we're allowed to use. + buildDir = "/" + default: + return nil, fmt.Errorf("don't know how to build unknown program %s", info.Program) } + // No slashes in file name. + escapedProg := strings.Replace(info.Program, "/", "_", -1) + // Build the executable with the correct GOTOOLCHAIN, GOOS, GOARCH. // Use -trimpath for normalized file names. // (Skip if it's already built.) - exe := fmt.Sprintf("exe-%s.%s-%s", info.GoVersion, info.GOOS, info.GOARCH) - cmd := exec.Command("go", "build", "-trimpath", "-o", "../"+exe) - cmd.Stderr = os.Stderr - cmd.Dir = filepath.Join(revDir, "gopls") - cmd.Env = append(os.Environ(), - "GOTOOLCHAIN="+info.GoVersion, - "GOOS="+info.GOOS, - "GOARCH="+info.GOARCH, - ) - if !fileExists(filepath.Join(revDir, exe)) { + exe := fmt.Sprintf("exe-%s-%s.%s-%s", escapedProg, info.GoVersion, info.GOOS, info.GOARCH) + exe = filepath.Join(stacksDir, exe) + + if !fileExists(exe) { log.Printf("building %s@%s with %s for %s/%s", - info.Program, info.Version, info.GoVersion, info.GOOS, info.GOARCH) + info.Program, info.ProgramVersion, info.GoVersion, info.GOOS, info.GOARCH) + + cmd := exec.Command("go", "build", "-trimpath", "-o", exe, info.Program) + cmd.Stderr = os.Stderr + cmd.Dir = buildDir + cmd.Env = append(os.Environ(), + "GOTOOLCHAIN="+info.GoVersion, + "GOEXPERIMENT=", // Don't forward GOEXPERIMENT from current environment since the GOTOOLCHAIN selected might not support the same experiments. + "GOOS="+info.GOOS, + "GOARCH="+info.GOARCH, + ) if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("building: %v (rm -fr /tmp/gopls-stacks?)", err) + return nil, fmt.Errorf("building: %v (rm -fr %s?)", err, stacksDir) } } // Read pclntab of executable. - cmd = exec.Command("go", "tool", "objdump", exe) + cmd := exec.Command("go", "tool", "objdump", exe) cmd.Stdout = new(strings.Builder) cmd.Stderr = os.Stderr - cmd.Dir = revDir cmd.Env = append(os.Environ(), "GOTOOLCHAIN="+info.GoVersion, + "GOEXPERIMENT=", // Don't forward GOEXPERIMENT from current environment since the GOTOOLCHAIN selected might not support the same experiments. "GOOS="+info.GOOS, "GOARCH="+info.GOARCH, ) diff --git a/gopls/internal/telemetry/cmd/stacks/stacks_test.go b/gopls/internal/telemetry/cmd/stacks/stacks_test.go new file mode 100644 index 00000000000..452113a1581 --- /dev/null +++ b/gopls/internal/telemetry/cmd/stacks/stacks_test.go @@ -0,0 +1,345 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build linux || darwin + +package main + +import ( + "encoding/json" + "strings" + "testing" +) + +func TestReadPCLineTable(t *testing.T) { + if testing.Short() { + // TODO(prattmic): It would be nice to have a unit test that + // didn't require downloading. 
+ t.Skip("downloads source from the internet, skipping in -short") + } + + type testCase struct { + name string + info Info + wantSymbol string + wantFileLine FileLine + } + + tests := []testCase{ + { + name: "gopls", + info: Info{ + Program: "golang.org/x/tools/gopls", + ProgramVersion: "v0.16.1", + GoVersion: "go1.23.4", + GOOS: "linux", + GOARCH: "amd64", + }, + wantSymbol: "golang.org/x/tools/gopls/internal/cmd.(*Application).Run", + wantFileLine: FileLine{ + file: "golang.org/x/tools/gopls/internal/cmd/cmd.go", + line: 230, + }, + }, + { + name: "compile", + info: Info{ + Program: "cmd/compile", + ProgramVersion: "go1.23.4", + GoVersion: "go1.23.4", + GOOS: "linux", + GOARCH: "amd64", + }, + wantSymbol: "runtime.main", + wantFileLine: FileLine{ + file: "runtime/proc.go", + line: 147, + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + stacksDir := t.TempDir() + pcln, err := readPCLineTable(tc.info, stacksDir) + if err != nil { + t.Fatalf("readPCLineTable got err %v want nil", err) + } + + got, ok := pcln[tc.wantSymbol] + if !ok { + t.Fatalf("PCLineTable want entry %s got !ok from pcln %+v", tc.wantSymbol, pcln) + } + + if got != tc.wantFileLine { + t.Fatalf("symbol %s got FileLine %+v want %+v", tc.wantSymbol, got, tc.wantFileLine) + } + }) + } +} + +func TestParsePredicate(t *testing.T) { + for _, tc := range []struct { + expr string + arg string + want bool + }{ + {`"x"`, `"x"`, true}, + {`"x"`, `"axe"`, false}, // literals match whole words + {`"x"`, "val:x+5", true}, + {`"fu+12"`, "x:fu+12,", true}, + {`"fu+12"`, "snafu+12,", false}, + {`"fu+12"`, "x:fu+123,", false}, + {`"a.*b"`, "a.*b", true}, // regexp metachars are escaped + {`"a.*b"`, "axxb", false}, // ditto + {`"x"`, `"y"`, false}, + {`!"x"`, "x", false}, + {`!"x"`, "y", true}, + {`"x" && "y"`, "xy", false}, + {`"x" && "y"`, "x y", true}, + {`"x" && "y"`, "x", false}, + {`"x" && "y"`, "y", false}, + {`"xz" && "zy"`, "xzy", false}, + {`"xz" && "zy"`, "zy,xz", true}, + {`"x" || "y"`, "x\ny", true}, + {`"x" || "y"`, "x", true}, + {`"x" || "y"`, "y", true}, + {`"x" || "y"`, "z", false}, + } { + eval, err := parsePredicate(tc.expr) + if err != nil { + t.Fatal(err) + } + got := eval(tc.arg) + if got != tc.want { + t.Errorf("%s applied to %q: got %t, want %t", tc.expr, tc.arg, got, tc.want) + } + } +} + +func TestParsePredicateError(t *testing.T) { + // Validate that bad predicates return errors. + for _, expr := range []string{ + ``, + `1`, + `foo`, // an identifier, not a literal + `"x" + "y"`, + `"x" &&`, + `~"x"`, + `f(1)`, + } { + if _, err := parsePredicate(expr); err == nil { + t.Errorf("%s: got nil, want error", expr) + } + } +} + +// which takes the bulk of the time. +func TestUpdateIssues(t *testing.T) { + if testing.Short() { + t.Skip("downloads source from the internet, skipping in -short") + } + + c := &githubClient{divertChanges: true} + const stack1 = "stack1" + id1 := stackID(stack1) + stacksToURL := map[string]string{stack1: "URL1"} + + // checkIssueComment asserts that the change adds an issue of the specified + // number, with a body that contains various strings. 
+ checkIssueComment := func(t *testing.T, change any, number int, version string) { + t.Helper() + cic, ok := change.(addIssueComment) + if !ok { + t.Fatalf("got %T, want addIssueComment", change) + } + if cic.number != number { + t.Errorf("issue number: got %d, want %d", cic.number, number) + } + for _, want := range []string{"URL1", stack1, id1, "golang.org/x/tools/gopls@" + version} { + if !strings.Contains(cic.comment, want) { + t.Errorf("missing %q in comment:\n%s", want, cic.comment) + } + } + } + + t.Run("open issue", func(t *testing.T) { + issues := []*Issue{{ + Number: 1, + State: "open", + newStacks: []string{stack1}, + }} + + info := Info{ + Program: "golang.org/x/tools/gopls", + ProgramVersion: "v0.16.1", + } + stacks := map[string]map[Info]int64{stack1: map[Info]int64{info: 3}} + updateIssues(c, issues, stacks, stacksToURL) + changes := c.takeChanges() + + if g, w := len(changes), 2; g != w { + t.Fatalf("got %d changes, want %d", g, w) + } + + // The first change creates an issue comment. + checkIssueComment(t, changes[0], 1, "v0.16.1") + + // The second change updates the issue body, and only the body. + ui, ok := changes[1].(updateIssue) + if !ok { + t.Fatalf("got %T, want updateIssue", changes[1]) + } + if ui.number != 1 { + t.Errorf("issue number: got %d, want 1", ui.number) + } + if ui.Body == "" || ui.State != "" || ui.StateReason != "" { + t.Errorf("updating other than just the body:\n%+v", ui) + } + want := "Dups: " + id1 + if !strings.Contains(ui.Body, want) { + t.Errorf("missing %q in body %q", want, ui.Body) + } + }) + t.Run("should be reopened", func(t *testing.T) { + issues := []*Issue{{ + // Issue purportedly fixed in v0.16.0 + Number: 2, + State: "closed", + StateReason: "completed", + Milestone: &Milestone{Title: "gopls/v0.16.0"}, + newStacks: []string{stack1}, + }} + // New stack in a later version. + info := Info{ + Program: "golang.org/x/tools/gopls", + ProgramVersion: "v0.17.0", + } + stacks := map[string]map[Info]int64{stack1: map[Info]int64{info: 3}} + updateIssues(c, issues, stacks, stacksToURL) + + changes := c.takeChanges() + if g, w := len(changes), 2; g != w { + t.Fatalf("got %d changes, want %d", g, w) + } + // The first change creates an issue comment. + checkIssueComment(t, changes[0], 2, "v0.17.0") + + // The second change updates the issue body, state, and state reason. + ui, ok := changes[1].(updateIssue) + if !ok { + t.Fatalf("got %T, want updateIssue", changes[1]) + } + if ui.number != 2 { + t.Errorf("issue number: got %d, want 2", ui.number) + } + if ui.Body == "" || ui.State != "open" || ui.StateReason != "reopened" { + t.Errorf(`update fields should be non-empty body, state "open", state reason "reopened":\n%+v`, ui) + } + want := "Dups: " + id1 + if !strings.Contains(ui.Body, want) { + t.Errorf("missing %q in body %q", want, ui.Body) + } + + }) + +} + +func TestMarshalUpdateIssueFields(t *testing.T) { + // Verify that only the non-empty fields of updateIssueFields are marshalled. 
+ for _, tc := range []struct { + fields updateIssue + want string + }{ + {updateIssue{Body: "b"}, `{"body":"b"}`}, + {updateIssue{State: "open"}, `{"state":"open"}`}, + {updateIssue{State: "open", StateReason: "reopened"}, `{"state":"open","state_reason":"reopened"}`}, + } { + bytes, err := json.Marshal(tc.fields) + if err != nil { + t.Fatal(err) + } + got := string(bytes) + if got != tc.want { + t.Errorf("%+v: got %s, want %s", tc.fields, got, tc.want) + } + } +} + +func TestShouldReopen(t *testing.T) { + const stack = "stack" + const gopls = "golang.org/x/tools/gopls" + goplsMilestone := &Milestone{Title: "gopls/v0.2.0"} + goMilestone := &Milestone{Title: "Go1.23"} + + for _, tc := range []struct { + name string + issue Issue + info Info + want bool + }{ + { + "issue open", + Issue{State: "open", Milestone: goplsMilestone}, + Info{Program: gopls, ProgramVersion: "v0.2.0"}, + false, + }, + { + "issue closed but not fixed", + Issue{State: "closed", StateReason: "not_planned", Milestone: goplsMilestone}, + Info{Program: gopls, ProgramVersion: "v0.2.0"}, + false, + }, + { + "different program", + Issue{State: "closed", StateReason: "completed", Milestone: goplsMilestone}, + Info{Program: "other", ProgramVersion: "v0.2.0"}, + false, + }, + { + "later version", + Issue{State: "closed", StateReason: "completed", Milestone: goplsMilestone}, + Info{Program: gopls, ProgramVersion: "v0.3.0"}, + true, + }, + { + "earlier version", + Issue{State: "closed", StateReason: "completed", Milestone: goplsMilestone}, + Info{Program: gopls, ProgramVersion: "v0.1.0"}, + false, + }, + { + "same version", + Issue{State: "closed", StateReason: "completed", Milestone: goplsMilestone}, + Info{Program: gopls, ProgramVersion: "v0.2.0"}, + true, + }, + { + "compiler later version", + Issue{State: "closed", StateReason: "completed", Milestone: goMilestone}, + Info{Program: "cmd/compile", ProgramVersion: "go1.24"}, + true, + }, + { + "compiler earlier version", + Issue{State: "closed", StateReason: "completed", Milestone: goMilestone}, + Info{Program: "cmd/compile", ProgramVersion: "go1.22"}, + false, + }, + { + "compiler same version", + Issue{State: "closed", StateReason: "completed", Milestone: goMilestone}, + Info{Program: "cmd/compile", ProgramVersion: "go1.23"}, + true, + }, + } { + t.Run(tc.name, func(t *testing.T) { + tc.issue.Number = 1 + tc.issue.newStacks = []string{stack} + got := shouldReopen(&tc.issue, map[string]map[Info]int64{stack: map[Info]int64{tc.info: 1}}) + if got != tc.want { + t.Errorf("got %t, want %t", got, tc.want) + } + }) + } +} diff --git a/gopls/internal/template/implementations.go b/gopls/internal/template/implementations.go index 19a27620b57..4ed485cfee2 100644 --- a/gopls/internal/template/implementations.go +++ b/gopls/internal/template/implementations.go @@ -199,15 +199,8 @@ func SemanticTokens(ctx context.Context, snapshot *cache.Snapshot, spn protocol. line, col := p.LineCol(t.Start) add(line, col, uint32(sz)) } - const noStrings = false - const noNumbers = false ans := &protocol.SemanticTokens{ - Data: semtok.Encode( - items, - noStrings, - noNumbers, - snapshot.Options().SemanticTypes, - snapshot.Options().SemanticMods), + Data: semtok.Encode(items, nil, nil), // for small cache, some day. 
for now, the LSP client ignores this // (that is, when the LSP client starts returning these, we can cache) ResultID: fmt.Sprintf("%v", time.Now()), diff --git a/gopls/internal/test/integration/bench/repo_test.go b/gopls/internal/test/integration/bench/repo_test.go index 0e86f3e1da7..50370e73491 100644 --- a/gopls/internal/test/integration/bench/repo_test.go +++ b/gopls/internal/test/integration/bench/repo_test.go @@ -147,7 +147,7 @@ type repo struct { // reusableDir return a reusable directory for benchmarking, or "". // // If the user specifies a directory, the test will create and populate it -// on the first run an re-use it on subsequent runs. Otherwise it will +// on the first run and re-use it on subsequent runs. Otherwise it will // create, populate, and delete a temporary directory. func (r *repo) reusableDir() string { if r.inDir == nil { diff --git a/gopls/internal/test/integration/codelens/codelens_test.go b/gopls/internal/test/integration/codelens/codelens_test.go index bb8ad95ee19..c1f2c524232 100644 --- a/gopls/internal/test/integration/codelens/codelens_test.go +++ b/gopls/internal/test/integration/codelens/codelens_test.go @@ -261,9 +261,6 @@ module mod.com/a go 1.22 require golang.org/x/hello v1.2.3 --- go.sum -- -golang.org/x/hello v1.2.3 h1:7Wesfkx/uBd+eFgPrq0irYj/1XfmbvLV8jZ/W7C2Dwg= -golang.org/x/hello v1.2.3/go.mod h1:OgtlzsxVMUUdsdQCIDYgaauCTH47B8T8vofouNJfzgY= -- main.go -- package main @@ -282,6 +279,7 @@ require golang.org/x/hello v1.3.3 ` WithOptions( + WriteGoSum("."), ProxyFiles(proxyWithLatest), ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { env.RunGoCommand("mod", "vendor") @@ -335,11 +333,6 @@ require golang.org/x/hello v1.0.0 require golang.org/x/unused v1.0.0 // EOF --- go.sum -- -golang.org/x/hello v1.0.0 h1:qbzE1/qT0/zojAMd/JcPsO2Vb9K4Bkeyq0vB2JGMmsw= -golang.org/x/hello v1.0.0/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= -golang.org/x/unused v1.0.0 h1:LecSbCn5P3vTcxubungSt1Pn4D/WocCaiWOPDC0y0rw= -golang.org/x/unused v1.0.0/go.mod h1:ihoW8SgWzugwwj0N2SfLfPZCxTB1QOVfhMfB5PWTQ8U= -- main.go -- package main @@ -349,7 +342,10 @@ func main() { _ = hi.Goodbye } ` - WithOptions(ProxyFiles(proxy)).Run(t, shouldRemoveDep, func(t *testing.T, env *Env) { + WithOptions( + WriteGoSum("."), + ProxyFiles(proxy), + ).Run(t, shouldRemoveDep, func(t *testing.T, env *Env) { env.OpenFile("go.mod") env.RegexpReplace("go.mod", "// EOF", "// EOF unsaved edit") // unsaved edits ok env.ExecuteCodeLensCommand("go.mod", command.Tidy, nil) diff --git a/gopls/internal/test/integration/completion/completion_test.go b/gopls/internal/test/integration/completion/completion_test.go index 1f6eb2fe0fb..1d293fe9019 100644 --- a/gopls/internal/test/integration/completion/completion_test.go +++ b/gopls/internal/test/integration/completion/completion_test.go @@ -276,9 +276,6 @@ module mod.com go 1.14 require example.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= -- main.go -- package main @@ -295,6 +292,7 @@ func _() { } ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), ).Run(t, mod, func(t *testing.T, env *Env) { // Make sure the dependency is in the module cache and accessible for @@ -347,9 +345,6 @@ module mod.com go 1.14 require example.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= -- useblah.go -- // +build hidden @@ -361,7 
+356,9 @@ package mainmod const Name = "mainmod" ` - WithOptions(ProxyFiles(proxy)).Run(t, files, func(t *testing.T, env *Env) { + WithOptions( + WriteGoSum("."), + ProxyFiles(proxy)).Run(t, files, func(t *testing.T, env *Env) { env.CreateBuffer("import.go", "package pkg\nvar _ = mainmod.Name\n") env.SaveBuffer("import.go") content := env.ReadWorkspaceFile("import.go") @@ -471,12 +468,12 @@ module test.com go 1.16 -- prog.go -- package waste -// Deprecated, use newFoof +// Deprecated: use newFoof. func fooFunc() bool { return false } -// Deprecated +// Deprecated: bad. const badPi = 3.14 func doit() { diff --git a/gopls/internal/test/integration/diagnostics/analysis_test.go b/gopls/internal/test/integration/diagnostics/analysis_test.go index 8cb86f8f735..7e93398d57a 100644 --- a/gopls/internal/test/integration/diagnostics/analysis_test.go +++ b/gopls/internal/test/integration/diagnostics/analysis_test.go @@ -125,3 +125,32 @@ func main() { } }) } + +func TestAnalysisFiltering(t *testing.T) { + // This test checks that hint level diagnostics are only surfaced for open + // files. + + const src = ` +-- go.mod -- +module mod.com + +go 1.20 + +-- a.go -- +package p + +var x interface{} + +-- b.go -- +package p + +var y interface{} +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + env.AfterChange( + Diagnostics(ForFile("a.go"), WithMessage("replaced by any")), + NoDiagnostics(ForFile("b.go")), + ) + }) +} diff --git a/gopls/internal/test/integration/diagnostics/diagnostics_test.go b/gopls/internal/test/integration/diagnostics/diagnostics_test.go index 9e6c504cc86..c496f6464a3 100644 --- a/gopls/internal/test/integration/diagnostics/diagnostics_test.go +++ b/gopls/internal/test/integration/diagnostics/diagnostics_test.go @@ -421,9 +421,6 @@ module mod.com go 1.12 require foo.test v1.2.3 --- go.sum -- -foo.test v1.2.3 h1:TMA+lyd1ck0TqjSFpNe4T6cf/K6TYkoHwOOcMBMjaEw= -foo.test v1.2.3/go.mod h1:Ij3kyLIe5lzjycjh13NL8I2gX0quZuTdW0MnmlwGBL4= -- print.go -- package lib @@ -451,6 +448,7 @@ const Answer = 42 func TestResolveDiagnosticWithDownload(t *testing.T) { WithOptions( + WriteGoSum("."), ProxyFiles(testPackageWithRequireProxy), ).Run(t, testPackageWithRequire, func(t *testing.T, env *Env) { env.OpenFile("print.go") @@ -1753,9 +1751,6 @@ module mod.com go 1.12 require nested.com v1.0.0 --- go.sum -- -nested.com v1.0.0 h1:I6spLE4CgFqMdBPc+wTV2asDO2QJ3tU0YAT+jkLeN1I= -nested.com v1.0.0/go.mod h1:ly53UzXQgVjSlV7wicdBB4p8BxfytuGT1Xcyv0ReJfI= -- main.go -- package main @@ -1779,6 +1774,7 @@ package hello func helloHelper() {} ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), Modes(Default), ).Run(t, nested, func(t *testing.T, env *Env) { diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index 1b1e0f170a2..adc9df6c17d 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -379,6 +379,10 @@ func clientCapabilities(cfg EditorConfig) (protocol.ClientCapabilities, error) { } // Request that the server provide its complete list of code action kinds. 
capabilities.TextDocument.CodeAction = protocol.CodeActionClientCapabilities{ + DataSupport: true, + ResolveSupport: &protocol.ClientCodeActionResolveOptions{ + Properties: []string{"edit"}, + }, CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{ CodeActionKind: protocol.ClientCodeActionKindOptions{ ValueSet: []protocol.CodeActionKind{protocol.Empty}, // => all diff --git a/gopls/internal/test/integration/misc/compileropt_test.go b/gopls/internal/test/integration/misc/compileropt_test.go index 8b8f78cd62d..175ec640042 100644 --- a/gopls/internal/test/integration/misc/compileropt_test.go +++ b/gopls/internal/test/integration/misc/compileropt_test.go @@ -14,7 +14,7 @@ import ( . "golang.org/x/tools/gopls/internal/test/integration" ) -// TestCompilerOptDetails exercises the "Toggle compiler optimization details" code action. +// TestCompilerOptDetails exercises the "{Show,Hide} compiler optimization details" code action. func TestCompilerOptDetails(t *testing.T) { if runtime.GOOS == "android" { t.Skipf("the compiler optimization details code action doesn't work on Android") @@ -24,7 +24,8 @@ func TestCompilerOptDetails(t *testing.T) { -- go.mod -- module mod.com -go 1.15 +go 1.18 + -- main.go -- package main @@ -38,7 +39,7 @@ func main() { env.OpenFile("main.go") actions := env.CodeActionForFile("main.go", nil) - // Execute the "Toggle compiler optimization details" command. + // Execute the "Show compiler optimization details" command. docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) if err != nil { t.Fatal(err) @@ -79,3 +80,89 @@ func f(x int) *int { return &x }`) ) }) } + +// TestCompilerOptDetails_perDirectory exercises that the "want +// optimization details" flag has per-directory cardinality. +func TestCompilerOptDetails_perDirectory(t *testing.T) { + if runtime.GOOS == "android" { + t.Skipf("the compiler optimization details code action doesn't work on Android") + } + + const mod = ` +-- go.mod -- +module mod.com +go 1.18 + +-- a/a.go -- +package a + +func F(x int) any { return &x } + +-- a/a_test.go -- +package a + +func G(x int) any { return &x } + +-- a/a_x_test.go -- +package a_test + +func H(x int) any { return &x } +` + + Run(t, mod, func(t *testing.T, env *Env) { + // toggle executes the "Toggle compiler optimization details" + // command within a file, and asserts that it has the specified title. + toggle := func(filename, wantTitle string) { + env.OpenFile(filename) + actions := env.CodeActionForFile(filename, nil) + + docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) + if err != nil { + t.Fatal(err) + } + if docAction.Title != wantTitle { + t.Errorf("CodeAction.Title = %q, want %q", docAction.Title, wantTitle) + } + params := &protocol.ExecuteCommandParams{ + Command: docAction.Command.Command, + Arguments: docAction.Command.Arguments, + } + env.ExecuteCommand(params, nil) + } + + // Show diagnostics for directory a/ from one file. + // Diagnostics are reported for all three packages. 
+ toggle("a/a.go", `Show compiler optimization details for "a"`) + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 1, true), + Diagnostics( + ForFile("a/a.go"), + AtPosition("a/a.go", 2, 7), + WithMessage("x escapes to heap"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + Diagnostics( + ForFile("a/a_test.go"), + AtPosition("a/a_test.go", 2, 7), + WithMessage("x escapes to heap"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + Diagnostics( + ForFile("a/a_x_test.go"), + AtPosition("a/a_x_test.go", 2, 7), + WithMessage("x escapes to heap"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + ) + + // Hide diagnostics for the directory from a different file. + // All diagnostics disappear. + toggle("a/a_test.go", `Hide compiler optimization details for "a"`) + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 2, true), + NoDiagnostics(ForFile("a/a.go")), + NoDiagnostics(ForFile("a/a_test.go")), + NoDiagnostics(ForFile("a/a_x_test.go")), + ) + }) +} diff --git a/gopls/internal/test/integration/misc/configuration_test.go b/gopls/internal/test/integration/misc/configuration_test.go index 1077c21ac36..6d588a7d3da 100644 --- a/gopls/internal/test/integration/misc/configuration_test.go +++ b/gopls/internal/test/integration/misc/configuration_test.go @@ -186,7 +186,7 @@ var ErrFoo = errors.New("foo") cfg.Env = map[string]string{ "AN_ARBITRARY_VAR": "FOO", } - cfg.Settings = map[string]interface{}{ + cfg.Settings = map[string]any{ "staticcheck": true, } env.ChangeConfiguration(cfg) diff --git a/gopls/internal/test/integration/misc/definition_test.go b/gopls/internal/test/integration/misc/definition_test.go index 95054977e14..d36bb024672 100644 --- a/gopls/internal/test/integration/misc/definition_test.go +++ b/gopls/internal/test/integration/misc/definition_test.go @@ -466,10 +466,6 @@ module example.com/a go 1.14 require other.com/b v1.0.0 --- go.sum -- -other.com/b v1.0.0 h1:1wb3PMGdet5ojzrKl+0iNksRLnOM9Jw+7amBNqmYwqk= -other.com/b v1.0.0/go.mod h1:TgHQFucl04oGT+vrUm/liAzukYHNxCwKNkQZEyn3m9g= - -- a.go -- package a import "other.com/b" @@ -477,6 +473,7 @@ const _ = b.K ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), Modes(Default), // fails in 'experimental' mode ).Run(t, src, func(t *testing.T, env *Env) { diff --git a/gopls/internal/test/integration/misc/fix_test.go b/gopls/internal/test/integration/misc/fix_test.go index 5a01afe2400..261b5841109 100644 --- a/gopls/internal/test/integration/misc/fix_test.go +++ b/gopls/internal/test/integration/misc/fix_test.go @@ -21,9 +21,10 @@ func TestFillStruct(t *testing.T) { capabilities string wantCommand bool }{ - {"default", "{}", true}, - {"no data", `{ "textDocument": {"codeAction": { "resolveSupport": { "properties": ["edit"] } } } }`, true}, - {"resolve support", `{ "textDocument": {"codeAction": { "dataSupport": true, "resolveSupport": { "properties": ["edit"] } } } }`, false}, + {"default", "{}", false}, + {"no data support", `{"textDocument": {"codeAction": {"dataSupport": false, "resolveSupport": {"properties": ["edit"]}}}}`, true}, + {"no resolve support", `{"textDocument": {"codeAction": {"dataSupport": true, "resolveSupport": {"properties": []}}}}`, true}, + {"data and resolve support", `{"textDocument": {"codeAction": {"dataSupport": true, "resolveSupport": {"properties": ["edit"]}}}}`, false}, } const basic = ` diff --git 
a/gopls/internal/test/integration/misc/highlight_test.go b/gopls/internal/test/integration/misc/highlight_test.go index 9e3dd980464..e4da558e5d0 100644 --- a/gopls/internal/test/integration/misc/highlight_test.go +++ b/gopls/internal/test/integration/misc/highlight_test.go @@ -95,9 +95,6 @@ module mod.com go 1.12 require example.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:WFzrgiQJwEDJNLDUOV1f9qlasQkvzXf2UNLaNIqbWsI= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= -- main.go -- package main @@ -110,6 +107,7 @@ func main() {}` WithOptions( ProxyFiles(proxy), + WriteGoSum("."), ).Run(t, mod, func(t *testing.T, env *Env) { env.OpenFile("main.go") diff --git a/gopls/internal/test/integration/misc/hover_test.go b/gopls/internal/test/integration/misc/hover_test.go index 1592b899b1d..7be50efe6d4 100644 --- a/gopls/internal/test/integration/misc/hover_test.go +++ b/gopls/internal/test/integration/misc/hover_test.go @@ -21,7 +21,7 @@ func TestHoverUnexported(t *testing.T) { -- golang.org/x/structs@v1.0.0/go.mod -- module golang.org/x/structs -go 1.12 +go 1.21 -- golang.org/x/structs@v1.0.0/types.go -- package structs @@ -40,12 +40,9 @@ func printMixed(m Mixed) { -- go.mod -- module mod.com -go 1.12 +go 1.21 require golang.org/x/structs v1.0.0 --- go.sum -- -golang.org/x/structs v1.0.0 h1:Ito/a7hBYZaNKShFrZKjfBA/SIPvmBrcPCBWPx5QeKk= -golang.org/x/structs v1.0.0/go.mod h1:47gkSIdo5AaQaWJS0upVORsxfEr1LL1MWv9dmYF3iq4= -- main.go -- package main @@ -60,6 +57,7 @@ func main() { // TODO: use a nested workspace folder here. WithOptions( ProxyFiles(proxy), + WriteGoSum("."), ).Run(t, mod, func(t *testing.T, env *Env) { env.OpenFile("main.go") mixedLoc := env.RegexpSearch("main.go", "Mixed") diff --git a/gopls/internal/test/integration/misc/imports_test.go b/gopls/internal/test/integration/misc/imports_test.go index 5b8b020124d..98a70478ecf 100644 --- a/gopls/internal/test/integration/misc/imports_test.go +++ b/gopls/internal/test/integration/misc/imports_test.go @@ -8,11 +8,14 @@ import ( "os" "os/exec" "path/filepath" + "runtime" "strings" "testing" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/test/compare" . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/gopls/internal/protocol" ) @@ -246,6 +249,158 @@ var _, _ = x.X, y.Y }) } +// make sure it gets the v2 +/* marker test? + +Add proxy data with the special proxy/ prefix (see gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt). +Invoke the organizeImports codeaction directly (see gopls/internal/test/marker/testdata/codeaction/imports.txt, but use the edit=golden named argument instead of result= to minimize the size of the golden output. 
+*/ +func Test58382(t *testing.T) { + files := `-- main.go -- +package main +import "fmt" +func main() { + fmt.Println(xurls.Relaxed().FindAllString()) +} +-- go.mod -- +module demo +go 1.20 +` + cache := `-- mvdan.cc/xurls@v2.5.0/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +-- github.com/mvdan/xurls/v2@v1.1.0/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +` + modcache := t.TempDir() + defer cleanModCache(t, modcache) + mx := fake.UnpackTxt(cache) + for k, v := range mx { + fname := filepath.Join(modcache, k) + dir := filepath.Dir(fname) + os.MkdirAll(dir, 0777) + if err := os.WriteFile(fname, v, 0644); err != nil { + t.Fatal(err) + } + } + WithOptions( + EnvVars{"GOMODCACHE": modcache}, + WriteGoSum("."), + Settings{"importsSource": settings.ImportsSourceGopls}, + ).Run(t, files, func(t *testing.T, env *Env) { + + env.OpenFile("main.go") + env.SaveBuffer("main.go") + out := env.BufferText("main.go") + if !strings.Contains(out, "xurls/v2") { + t.Errorf("did not get v2 in %q", out) + } + }) +} + +// get the version requested in the go.mod file, not /v2 +func Test61208(t *testing.T) { + files := `-- main.go -- +package main +import "fmt" +func main() { + fmt.Println(xurls.Relaxed().FindAllString()) +} +-- go.mod -- +module demo +go 1.20 +require github.com/mvdan/xurls v1.1.0 +` + cache := `-- mvdan.cc/xurls/v2@v2.5.0/a/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +-- github.com/mvdan/xurls@v1.1.0/a/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +` + modcache := t.TempDir() + defer cleanModCache(t, modcache) + mx := fake.UnpackTxt(cache) + for k, v := range mx { + fname := filepath.Join(modcache, k) + dir := filepath.Dir(fname) + os.MkdirAll(dir, 0777) + if err := os.WriteFile(fname, v, 0644); err != nil { + t.Fatal(err) + } + } + WithOptions( + EnvVars{"GOMODCACHE": modcache}, + WriteGoSum("."), + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.SaveBuffer("main.go") + out := env.BufferText("main.go") + if !strings.Contains(out, "github.com/mvdan/xurls") { + t.Errorf("did not get github.com/mvdan/xurls in %q", out) + } + }) +} + +// get the version already used in the module +func Test60663(t *testing.T) { + files := `-- main.go -- +package main +import "fmt" +func main() { + fmt.Println(xurls.Relaxed().FindAllString()) +} +-- go.mod -- +module demo +go 1.20 +-- a.go -- +package main +import "github.com/mvdan/xurls" +var _ = xurls.Relaxed() +` + cache := `-- mvdan.cc/xurls/v2@v2.5.0/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +-- github.com/mvdan/xurls@v1.1.0/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +` + modcache := t.TempDir() + defer cleanModCache(t, modcache) + mx := fake.UnpackTxt(cache) + for k, v := range mx { + fname := filepath.Join(modcache, k) + dir := filepath.Dir(fname) + os.MkdirAll(dir, 0777) + if err := os.WriteFile(fname, v, 0644); err != nil { + t.Fatal(err) + } + } + WithOptions( + EnvVars{"GOMODCACHE": modcache}, + WriteGoSum("."), + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.SaveBuffer("main.go") + out := env.BufferText("main.go") + if !strings.Contains(out, "github.com/mvdan/xurls") { + t.Errorf("did not get github.com/mvdan/xurls in %q", out) + } + }) +} func TestRelativeReplace(t *testing.T) { const files = ` -- go.mod -- @@ -342,6 +497,42 @@ func TestA(t *testing.T) { }) } +// Test of golang/go#70755 +func 
TestQuickFixIssue70755(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com +go 1.19.0 // with go 1.23.0 this fails on some builders +-- bar/bar.go -- +package notbar +type NotBar struct {} +-- baz/baz.go -- +package baz +type Baz struct {} +-- foo/foo.go -- +package foo +type foo struct { + bar notbar.NotBar + baz baz.Baz +}` + WithOptions( + Settings{"importsSource": settings.ImportsSourceGopls}). + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("foo/foo.go") + var d protocol.PublishDiagnosticsParams + env.AfterChange(ReadDiagnostics("foo/foo.go", &d)) + env.ApplyQuickFixes("foo/foo.go", d.Diagnostics) + // at this point 'import notbar "mod.com/bar"' has been added + // but it's still missing the import of "mod.com/baz" + y := env.BufferText("foo/foo.go") + if !strings.Contains(y, `notbar "mod.com/bar"`) { + t.Error("quick fix did not find notbar") + } + env.SaveBuffer("foo/foo.go") + env.AfterChange(NoDiagnostics(ForFile("foo/foo.go"))) + }) +} + // Test for golang/go#52784 func TestGoWorkImports(t *testing.T) { const pkg = ` @@ -386,3 +577,114 @@ func Test() { env.AfterChange(NoDiagnostics(ForFile("caller/caller.go"))) }) } + +// prefer the undeprecated alternative 70736 +func TestDeprecated70736(t *testing.T) { + t.Logf("GOOS %s, GARCH %s version %s", runtime.GOOS, runtime.GOARCH, runtime.Version()) + files := `-- main.go -- +package main +func main() { + var v = xurls.Relaxed().FindAllString() + var w = xurls.A +} +-- go.mod -- +module demo +go 1.20 +` + cache := `-- mvdan.cc/xurls/v2@v2.5.0/xurls.go -- +package xurls +// Deprecated: +func Relaxed() *regexp.Regexp { +return nil +} +var A int +-- github.com/mvdan/xurls@v1.1.0/xurls.go -- +package xurls +func Relaxed() *regexp.Regexp { +return nil +} +var A int +` + modcache := t.TempDir() + defer cleanModCache(t, modcache) + mx := fake.UnpackTxt(cache) + for k, v := range mx { + fname := filepath.Join(modcache, k) + dir := filepath.Dir(fname) + os.MkdirAll(dir, 0777) + if err := os.WriteFile(fname, v, 0644); err != nil { + t.Fatal(err) + } + } + WithOptions( + EnvVars{"GOMODCACHE": modcache}, + WriteGoSum("."), + Settings{"importsSource": settings.ImportsSourceGopls}, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("main.go") + env.SaveBuffer("main.go") + out := env.BufferText("main.go") + if strings.Contains(out, "xurls/v2") { + t.Errorf("chose deprecated v2 in %q", out) + } + }) +} + +// Find the non-test package asked for in a test +func TestTestImports(t *testing.T) { + const pkg = ` +-- go.work -- +go 1.19 + +use ( + ./caller + ./mod + ./xxx +) +-- caller/go.mod -- +module caller.com + +go 1.18 + +require mod.com v0.0.0 +require xxx.com v0.0.0 + +replace mod.com => ../mod +replace xxx.com => ../xxx +-- caller/caller_test.go -- +package main + +var _ = a.Test +-- xxx/go.mod -- +module xxx.com + +go 1.18 +-- xxx/a/a_test.go -- +package a + +func Test() { +} +-- mod/go.mod -- +module mod.com + +go 1.18 +-- mod/a/a.go -- +package a + +func Test() { +} +` + WithOptions(Modes(Default)).Run(t, pkg, func(t *testing.T, env *Env) { + env.OpenFile("caller/caller_test.go") + env.AfterChange(Diagnostics(env.AtRegexp("caller/caller_test.go", "a.Test"))) + + // Saving caller_test.go should trigger goimports, which should find a.Test in + // the mod.com module, thanks to the go.work file. 
+ env.SaveBuffer("caller/caller_test.go") + env.AfterChange(NoDiagnostics(ForFile("caller/caller_test.go"))) + buf := env.BufferText("caller/caller_test.go") + if !strings.Contains(buf, "mod.com/a") { + t.Errorf("got %q, expected a mod.com/a", buf) + } + }) +} diff --git a/gopls/internal/test/integration/misc/package_symbols_test.go b/gopls/internal/test/integration/misc/package_symbols_test.go new file mode 100644 index 00000000000..860264f2bb0 --- /dev/null +++ b/gopls/internal/test/integration/misc/package_symbols_test.go @@ -0,0 +1,100 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestPackageSymbols(t *testing.T) { + const files = ` +-- a.go -- +package a + +var A = "var" +type S struct{} + +func (s *S) M1() {} +-- b.go -- +package a + +var b = 1 + +func (s *S) M2() {} + +func (s *S) M3() {} + +func F() {} +` + integration.Run(t, files, func(t *testing.T, env *integration.Env) { + a_uri := env.Sandbox.Workdir.URI("a.go") + b_uri := env.Sandbox.Workdir.URI("b.go") + args, err := command.MarshalArgs(command.PackageSymbolsArgs{ + URI: a_uri, + }) + if err != nil { + t.Fatalf("failed to MarshalArgs: %v", err) + } + + var res command.PackageSymbolsResult + env.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: "gopls.package_symbols", + Arguments: args, + }, &res) + + want := command.PackageSymbolsResult{ + PackageName: "a", + Files: []protocol.DocumentURI{a_uri, b_uri}, + Symbols: []command.PackageSymbol{ + { + Name: "A", + Kind: protocol.Variable, + File: 0, + }, + { + Name: "F", + Kind: protocol.Function, + File: 1, + }, + { + Name: "S", + Kind: protocol.Struct, + File: 0, + Children: []command.PackageSymbol{ + { + Name: "M1", + Kind: protocol.Method, + File: 0, + }, + { + Name: "M2", + Kind: protocol.Method, + File: 1, + }, + { + Name: "M3", + Kind: protocol.Method, + File: 1, + }, + }, + }, + { + Name: "b", + Kind: protocol.Variable, + File: 1, + }, + }, + } + if diff := cmp.Diff(want, res, cmpopts.IgnoreFields(command.PackageSymbol{}, "Range", "SelectionRange", "Detail")); diff != "" { + t.Errorf("gopls.package_symbols returned unexpected diff (-want +got):\n%s", diff) + } + }) +} diff --git a/gopls/internal/test/integration/misc/prompt_test.go b/gopls/internal/test/integration/misc/prompt_test.go index 9e87bd9ba36..37cd654b08d 100644 --- a/gopls/internal/test/integration/misc/prompt_test.go +++ b/gopls/internal/test/integration/misc/prompt_test.go @@ -276,6 +276,19 @@ func main() { allCounters = []string{acceptanceCounter, declinedCounter, attempt1Counter} ) + // To avoid (but not prevent) the flakes encountered in golang/go#68659, we + // need to perform our first read before starting to increment counters. + // + // ReadCounter checks to see if the counter file needs to be rotated before + // reading. When files are rotated, all previous counts are lost. Calling + // ReadCounter here reduces the window for a flake due to this rotation (the + // file was originally was located during countertest.Open in TestMain). + // + // golang/go#71590 tracks the larger problems with the countertest library. + // + // (The counter name below is arbitrary.) 
+ _, _ = countertest.ReadCounter(counter.New("issue68659")) + // We must increment counters in order for the initial reads below to // succeed. // diff --git a/gopls/internal/test/integration/misc/references_test.go b/gopls/internal/test/integration/misc/references_test.go index 73e4fffe3b8..e84dcd71dc3 100644 --- a/gopls/internal/test/integration/misc/references_test.go +++ b/gopls/internal/test/integration/misc/references_test.go @@ -376,10 +376,6 @@ module example.com/a go 1.14 require other.com/b v1.0.0 --- go.sum -- -other.com/b v1.0.0 h1:9WyCKS+BLAMRQM0CegP6zqP2beP+ShTbPaARpNY31II= -other.com/b v1.0.0/go.mod h1:TgHQFucl04oGT+vrUm/liAzukYHNxCwKNkQZEyn3m9g= - -- a.go -- package a import "other.com/b" @@ -388,6 +384,7 @@ var _ b.B ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), Modes(Default), // fails in 'experimental' mode ).Run(t, src, func(t *testing.T, env *Env) { diff --git a/gopls/internal/test/integration/misc/vendor_test.go b/gopls/internal/test/integration/misc/vendor_test.go index f3bed9082b7..6606772737e 100644 --- a/gopls/internal/test/integration/misc/vendor_test.go +++ b/gopls/internal/test/integration/misc/vendor_test.go @@ -31,9 +31,6 @@ module mod.com go 1.14 require golang.org/x/hello v1.2.3 --- go.sum -- -golang.org/x/hello v1.2.3 h1:EcMp5gSkIhaTkPXp8/3+VH+IFqTpk3ZbpOhqk0Ncmho= -golang.org/x/hello v1.2.3/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= -- vendor/modules.txt -- -- a/a1.go -- package a @@ -48,6 +45,7 @@ func _() { WithOptions( Modes(Default), ProxyFiles(basicProxy), + WriteGoSum("."), ).Run(t, pkgThatUsesVendoring, func(t *testing.T, env *Env) { env.OpenFile("a/a1.go") d := &protocol.PublishDiagnosticsParams{} @@ -71,9 +69,6 @@ module mod.com go 1.14 require golang.org/x/hello v1.2.3 --- go.sum -- -golang.org/x/hello v1.2.3 h1:EcMp5gSkIhaTkPXp8/3+VH+IFqTpk3ZbpOhqk0Ncmho= -golang.org/x/hello v1.2.3/go.mod h1:WW7ER2MRNXWA6c8/4bDIek4Hc/+DofTrMaQQitGXcco= -- main.go -- package main @@ -86,6 +81,7 @@ func main() { WithOptions( Modes(Default), ProxyFiles(basicProxy), + WriteGoSum("."), ).Run(t, src, func(t *testing.T, env *Env) { env.OpenFile("main.go") env.AfterChange(NoDiagnostics()) diff --git a/gopls/internal/test/integration/misc/vuln_test.go b/gopls/internal/test/integration/misc/vuln_test.go index 9f6061c43d9..9dad13179af 100644 --- a/gopls/internal/test/integration/misc/vuln_test.go +++ b/gopls/internal/test/integration/misc/vuln_test.go @@ -368,13 +368,6 @@ require ( golang.org/amod v1.0.0 // indirect golang.org/bmod v0.5.0 // indirect ) --- go.sum -- -golang.org/amod v1.0.0 h1:EUQOI2m5NhQZijXZf8WimSnnWubaFNrrKUH/PopTN8k= -golang.org/amod v1.0.0/go.mod h1:yvny5/2OtYFomKt8ax+WJGvN6pfN1pqjGnn7DQLUi6E= -golang.org/bmod v0.5.0 h1:KgvUulMyMiYRB7suKA0x+DfWRVdeyPgVJvcishTH+ng= -golang.org/bmod v0.5.0/go.mod h1:f6o+OhF66nz/0BBc/sbCsshyPRKMSxZIlG50B/bsM4c= -golang.org/cmod v1.1.3 h1:PJ7rZFTk7xGAunBRDa0wDe7rZjZ9R/vr1S2QkVVCngQ= -golang.org/cmod v1.1.3/go.mod h1:eCR8dnmvLYQomdeAZRCPgS5JJihXtqOQrpEkNj5feQA= -- x/x.go -- package x @@ -497,7 +490,7 @@ func vulnTestEnv(proxyData string) (*vulntest.DB, []RunOption, error) { "_GOPLS_TEST_BINARY_RUN_AS_GOPLS": "true", // needed to run `gopls vulncheck`. 
"GOSUMDB": "off", } - return db, []RunOption{ProxyFiles(proxyData), ev, settings}, nil + return db, []RunOption{ProxyFiles(proxyData), ev, settings, WriteGoSum(".")}, nil } func TestRunVulncheckPackageDiagnostics(t *testing.T) { @@ -675,7 +668,7 @@ func TestRunGovulncheck_Expiry(t *testing.T) { }) } -func stringify(a interface{}) string { +func stringify(a any) string { data, _ := json.Marshal(a) return string(data) } @@ -814,9 +807,6 @@ go 1.18 require golang.org/bmod v0.5.0 --- go.sum -- -golang.org/bmod v0.5.0 h1:MT/ysNRGbCiURc5qThRFWaZ5+rK3pQRPo9w7dYZfMDk= -golang.org/bmod v0.5.0/go.mod h1:k+zl+Ucu4yLIjndMIuWzD/MnOHy06wqr3rD++y0abVs= -- x/x.go -- package x diff --git a/gopls/internal/test/integration/modfile/modfile_test.go b/gopls/internal/test/integration/modfile/modfile_test.go index 243bb04e960..5a194246a42 100644 --- a/gopls/internal/test/integration/modfile/modfile_test.go +++ b/gopls/internal/test/integration/modfile/modfile_test.go @@ -808,7 +808,6 @@ go 1.12 require ( example.com v1.2.3 ) --- go.sum -- -- main.go -- package main @@ -918,11 +917,6 @@ module mod.com go 1.12 require hasdep.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= -hasdep.com v1.2.3 h1:00y+N5oD+SpKoqV1zP2VOPawcW65Zb9NebANY3GSzGI= -hasdep.com v1.2.3/go.mod h1:ePVZOlez+KZEOejfLPGL2n4i8qiAjrkhQZ4wcImqAes= -- main.go -- package main @@ -957,19 +951,13 @@ go 1.12 require hasdep.com v1.2.3 require random.com v1.2.3 --- go.sum -- -example.com v1.2.3 h1:ihBTGWGjTU3V4ZJ9OmHITkU9WQ4lGdQkMjgyLFk0FaY= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= -hasdep.com v1.2.3 h1:00y+N5oD+SpKoqV1zP2VOPawcW65Zb9NebANY3GSzGI= -hasdep.com v1.2.3/go.mod h1:ePVZOlez+KZEOejfLPGL2n4i8qiAjrkhQZ4wcImqAes= -random.com v1.2.3 h1:PzYTykzqqH6+qU0dIgh9iPFbfb4Mm8zNBjWWreRKtx0= -random.com v1.2.3/go.mod h1:8EGj+8a4Hw1clAp8vbaeHAsKE4sbm536FP7nKyXO+qQ= -- main.go -- package main func main() {} ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), ).Run(t, mod, func(t *testing.T, env *Env) { d := &protocol.PublishDiagnosticsParams{} @@ -1010,7 +998,6 @@ go 1.12 require ( example.com v1.2.3 ) --- go.sum -- -- main.go -- package main @@ -1078,7 +1065,6 @@ func Goodbye() { module mod.com go 1.12 --- go.sum -- -- main.go -- package main diff --git a/gopls/internal/test/integration/watch/setting_test.go b/gopls/internal/test/integration/watch/setting_test.go index abd9799c584..2a825a5b937 100644 --- a/gopls/internal/test/integration/watch/setting_test.go +++ b/gopls/internal/test/integration/watch/setting_test.go @@ -60,7 +60,7 @@ package subdir // use (true|false) or some other truthy value. 
func TestSubdirWatchPatterns_BadValues(t *testing.T) { tests := []struct { - badValue interface{} + badValue any wantMessage string }{ {true, "invalid type bool (want string)"}, diff --git a/gopls/internal/test/integration/watch/watch_test.go b/gopls/internal/test/integration/watch/watch_test.go index 3fb1ab546a6..340ceb5ebf7 100644 --- a/gopls/internal/test/integration/watch/watch_test.go +++ b/gopls/internal/test/integration/watch/watch_test.go @@ -525,9 +525,6 @@ module mod.com go 1.12 require example.com v1.2.2 --- go.sum -- -example.com v1.2.3 h1:OnPPkx+rW63kj9pgILsu12MORKhSlnFa3DVRJq1HZ7g= -example.com v1.2.3/go.mod h1:Y2Rc5rVWjWur0h3pd9aEvK5Pof8YKDANh9gHA2Maujo= -- main.go -- package main @@ -537,7 +534,9 @@ func main() { blah.X() } ` - WithOptions(ProxyFiles(proxy)).Run(t, mod, func(t *testing.T, env *Env) { + WithOptions( + WriteGoSum("."), + ProxyFiles(proxy)).Run(t, mod, func(t *testing.T, env *Env) { env.WriteWorkspaceFiles(map[string]string{ "go.mod": `module mod.com diff --git a/gopls/internal/test/integration/workspace/metadata_test.go b/gopls/internal/test/integration/workspace/metadata_test.go index 59dfec3ad97..71ca4329777 100644 --- a/gopls/internal/test/integration/workspace/metadata_test.go +++ b/gopls/internal/test/integration/workspace/metadata_test.go @@ -217,9 +217,6 @@ module b.com/nested go 1.18 require b.com/other v1.4.6 --- go.sum -- -b.com/other v1.4.6 h1:pHXSzGsk6DamYXp9uRdDB9A/ZQqAN9it+JudU0sBf94= -b.com/other v1.4.6/go.mod h1:T0TYuGdAHw4p/l0+1P/yhhYHfZRia7PaadNVDu58OWM= -- nested.go -- package nested @@ -228,6 +225,7 @@ import "b.com/other/foo" const C = foo.Foo ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), ).Run(t, files, func(t *testing.T, env *Env) { env.OnceMet( diff --git a/gopls/internal/test/integration/workspace/packages_test.go b/gopls/internal/test/integration/workspace/packages_test.go index 7ee19bcca54..fdee21d822f 100644 --- a/gopls/internal/test/integration/workspace/packages_test.go +++ b/gopls/internal/test/integration/workspace/packages_test.go @@ -119,12 +119,14 @@ package foo import "testing" func Foo() func TestFoo2(t *testing.T) +func foo() -- foo_test.go -- package foo import "testing" func TestFoo(t *testing.T) func Issue70927(*error) +func Test_foo(t *testing.T) -- foo2_test.go -- package foo_test @@ -164,6 +166,7 @@ func Test(*testing.T) URI: env.Editor.DocumentURI("foo_test.go"), Tests: []command.TestCase{ {Name: "TestFoo"}, + {Name: "Test_foo"}, }, }, }, @@ -188,6 +191,7 @@ func Test(*testing.T) }, }, []string{ "func TestFoo(t *testing.T)", + "func Test_foo(t *testing.T)", "func TestBar(t *testing.T) {}", }) }) @@ -242,6 +246,7 @@ func Test(*testing.T) URI: env.Editor.DocumentURI("foo_test.go"), Tests: []command.TestCase{ {Name: "TestFoo"}, + {Name: "Test_foo"}, }, }, }, @@ -282,6 +287,7 @@ func Test(*testing.T) }, }, []string{ "func TestFoo(t *testing.T)", + "func Test_foo(t *testing.T)", "func TestBaz(*testing.T)", "func BenchmarkBaz(*testing.B)", "func FuzzBaz(*testing.F)", diff --git a/gopls/internal/test/integration/workspace/standalone_test.go b/gopls/internal/test/integration/workspace/standalone_test.go index d837899f7fb..3b690465744 100644 --- a/gopls/internal/test/integration/workspace/standalone_test.go +++ b/gopls/internal/test/integration/workspace/standalone_test.go @@ -194,7 +194,7 @@ func main() {} ) cfg := env.Editor.Config() - cfg.Settings = map[string]interface{}{ + cfg.Settings = map[string]any{ "standaloneTags": []string{"ignore"}, } env.ChangeConfiguration(cfg) diff --git 
a/gopls/internal/test/integration/workspace/vendor_test.go b/gopls/internal/test/integration/workspace/vendor_test.go index f14cf539de0..10826430164 100644 --- a/gopls/internal/test/integration/workspace/vendor_test.go +++ b/gopls/internal/test/integration/workspace/vendor_test.go @@ -36,10 +36,6 @@ module example.com/a go 1.14 require other.com/b v1.0.0 --- go.sum -- -other.com/b v1.0.0 h1:ct1+0RPozzMvA2rSYnVvIfr/GDHcd7oVnw147okdi3g= -other.com/b v1.0.0/go.mod h1:bfTSZo/4ZtAQJWBYScopwW6n9Ctfsl2mi8nXsqjDXR8= - -- a.go -- package a @@ -49,6 +45,7 @@ var _ b.B ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), Modes(Default), ).Run(t, src, func(t *testing.T, env *Env) { diff --git a/gopls/internal/test/integration/workspace/workspace_test.go b/gopls/internal/test/integration/workspace/workspace_test.go index 587ac522c41..00d4d81e021 100644 --- a/gopls/internal/test/integration/workspace/workspace_test.go +++ b/gopls/internal/test/integration/workspace/workspace_test.go @@ -309,9 +309,6 @@ module a.com require c.com v1.2.3 exclude b.com v1.2.3 --- go.sum -- -c.com v1.2.3 h1:n07Dz9fYmpNqvZMwZi5NEqFcSHbvLa9lacMX+/g25tw= -c.com v1.2.3/go.mod h1:/4TyYgU9Nu5tA4NymP5xyqE8R2VMzGD3TbJCwCOvHAg= -- main.go -- package a @@ -320,6 +317,7 @@ func main() { } ` WithOptions( + WriteGoSum("."), ProxyFiles(proxy), ).Run(t, files, func(t *testing.T, env *Env) { env.OnceMet( diff --git a/gopls/internal/test/marker/doc.go b/gopls/internal/test/marker/doc.go index abddbddacd3..dff8dfa109f 100644 --- a/gopls/internal/test/marker/doc.go +++ b/gopls/internal/test/marker/doc.go @@ -120,7 +120,7 @@ Here is the list of supported value markers: argument may be specified only as a string or regular expression in the first pass. - - defloc(name, location): performs a textDocument/defintiion request at the + - defloc(name, location): performs a textDocument/definition request at the src location, and binds the result to the given name. This may be used to refer to positions in the standard library. diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index 654bca4ae5b..516dfeb3881 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -2223,6 +2223,18 @@ func codeAction(env *integration.Env, uri protocol.DocumentURI, rng protocol.Ran // specified location and kind, and captures the resulting document changes. // If diag is non-nil, it is used as the code action context. func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng protocol.Range, kind protocol.CodeActionKind, diag *protocol.Diagnostic) ([]protocol.DocumentChange, error) { + // Collect any server-initiated changes created by workspace/applyEdit. + // + // We set up this handler immediately, not right before executing the code + // action command, so we can assert that neither the codeAction request nor + // codeAction resolve request cause edits as a side effect (golang/go#71405). + var changes []protocol.DocumentChange + restore := env.Editor.Client().SetApplyEditHandler(func(ctx context.Context, wsedit *protocol.WorkspaceEdit) error { + changes = append(changes, wsedit.DocumentChanges...) + return nil + }) + defer restore() + // Request all code actions that apply to the diagnostic. // A production client would set Only=[kind], // but we can give a better error if we don't filter. 
@@ -2312,14 +2324,6 @@ func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng proto // whose WorkspaceEditFunc hook temporarily gathers the edits // instead of applying them. - var changes []protocol.DocumentChange - cli := env.Editor.Client() - restore := cli.SetApplyEditHandler(func(ctx context.Context, wsedit *protocol.WorkspaceEdit) error { - changes = append(changes, wsedit.DocumentChanges...) - return nil - }) - defer restore() - if _, err := env.Editor.Server.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ Command: action.Command.Command, Arguments: action.Command.Arguments, diff --git a/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt index 9dd0f766e05..0fba1afe003 100644 --- a/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt +++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable.txt @@ -1,6 +1,16 @@ This test checks the behavior of the 'extract variable/constant' code action. See extract_variable_resolve.txt for the same test with resolve support. +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} + -- flags -- -ignore_extra_diags diff --git a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt index 050f29bfec7..5916c0696cc 100644 --- a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt +++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all.txt @@ -1,5 +1,15 @@ This test checks the behavior of the 'replace all occurrences of expression' code action, with resolve support. -See extract_expressions.txt for the same test without resolve support. +See extract_variable_all_resolve.txt for the same test with resolve support. + +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} -- flags -- -ignore_extra_diags diff --git a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt index 02c03929567..8f6544f19df 100644 --- a/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt +++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_all_resolve.txt @@ -1,5 +1,5 @@ This test checks the behavior of the 'replace all occurrences of expression' code action, with resolve support. -See extract_expressions.txt for the same test without resolve support. +See extract_variable_all.txt for the same test without resolve support. -- capabilities.json -- { diff --git a/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt index 203b6d1eadc..819717897ab 100644 --- a/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt +++ b/gopls/internal/test/marker/testdata/codeaction/extract_variable_resolve.txt @@ -1,17 +1,6 @@ This test checks the behavior of the 'extract variable/constant' code action, with resolve support. See extract_variable.txt for the same test without resolve support. 
--- capabilities.json -- -{ - "textDocument": { - "codeAction": { - "dataSupport": true, - "resolveSupport": { - "properties": ["edit"] - } - } - } -} -- flags -- -ignore_extra_diags diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt b/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt index 600119dad8e..5a50978ad5e 100644 --- a/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt +++ b/gopls/internal/test/marker/testdata/codeaction/fill_struct.txt @@ -1,6 +1,16 @@ This test checks the behavior of the 'fill struct' code action. See fill_struct_resolve.txt for same test with resolve support. +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} + -- flags -- -ignore_extra_diags @@ -364,12 +374,15 @@ func fill() { _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) } -- @fillStruct_anon/fillStruct_anon.go -- -@@ -13 +13,5 @@ +@@ -13 +13,8 @@ - _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) + _ := StructAnon{ + a: struct{}{}, + b: map[string]any{}, -+ c: map[string]struct{d int; e bool}{}, ++ c: map[string]struct { ++ d int ++ e bool ++ }{}, + } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) -- fillStruct_nested.go -- package fillstruct @@ -457,13 +470,8 @@ func fill() { + UnfilledInt: 0, + StructPartialB: StructPartialB{}, -- @fillStruct_partial2/fillStruct_partial.go -- -@@ -19,4 +19,2 @@ -- /* this comment should disappear */ -- PrefilledInt: 7, // This comment should be blown away. -- /* As should -- this one */ -+ PrefilledInt: 7, -+ UnfilledInt: 0, +@@ -23 +23 @@ ++ UnfilledInt: 0, -- fillStruct_spaces.go -- package fillstruct @@ -566,7 +574,7 @@ func _[T any]() { + bar: 0, +} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) -- @typeparams3/typeparams.go -- -@@ -21 +21 @@ +@@ -22 +22 @@ + foo: 0, -- @typeparams4/typeparams.go -- @@ -29 +29,4 @@ @@ -723,3 +731,33 @@ func _() { + aliasArray: aliasArray{}, + aliasNamed: aliasNamed{}, + } //@codeaction("}", "refactor.rewrite.fillStruct", edit=alias) +-- preserveformat/preserveformat.go -- +package preserveformat + +type ( + Node struct { + Value int + } + Graph struct { + Nodes []*Node `json:""` + Edges map[*Node]*Node + Other string + } +) + +func _() { + _ := &Graph{ + // comments at the start preserved + Nodes: []*Node{ + {Value: 0}, // comments in the middle preserved + // between lines + {Value: 0}, + }, // another comment + // comment group + // below + } //@codeaction("}", "refactor.rewrite.fillStruct", edit=preserveformat) +} +-- @preserveformat/preserveformat/preserveformat.go -- +@@ -24 +24,2 @@ ++ Edges: map[*Node]*Node{}, ++ Other: "", diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt index 6d1250e26aa..9c1f8f728ca 100644 --- a/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt +++ b/gopls/internal/test/marker/testdata/codeaction/fill_struct_resolve.txt @@ -1,17 +1,6 @@ This test checks the behavior of the 'fill struct' code action, with resolve support. See fill_struct.txt for same test without resolve support. 
--- capabilities.json -- -{ - "textDocument": { - "codeAction": { - "dataSupport": true, - "resolveSupport": { - "properties": ["edit"] - } - } - } -} -- flags -- -ignore_extra_diags @@ -373,12 +362,15 @@ func fill() { _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) } -- @fillStruct_anon/fillStruct_anon.go -- -@@ -13 +13,5 @@ +@@ -13 +13,8 @@ - _ := StructAnon{} //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) + _ := StructAnon{ + a: struct{}{}, + b: map[string]any{}, -+ c: map[string]struct{d int; e bool}{}, ++ c: map[string]struct { ++ d int ++ e bool ++ }{}, + } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_anon) -- fillStruct_nested.go -- package fillstruct @@ -452,8 +444,8 @@ func fill() { PrefilledInt: 5, } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_partial1) b := StructPartialB{ - /* this comment should disappear */ - PrefilledInt: 7, // This comment should be blown away. + /* this comment should be preserved */ + PrefilledInt: 7, // This comment should be preserved. /* As should this one */ } //@codeaction("}", "refactor.rewrite.fillStruct", edit=fillStruct_partial2) @@ -466,13 +458,8 @@ func fill() { + UnfilledInt: 0, + StructPartialB: StructPartialB{}, -- @fillStruct_partial2/fillStruct_partial.go -- -@@ -19,4 +19,2 @@ -- /* this comment should disappear */ -- PrefilledInt: 7, // This comment should be blown away. -- /* As should -- this one */ -+ PrefilledInt: 7, -+ UnfilledInt: 0, +@@ -23 +23 @@ ++ UnfilledInt: 0, -- fillStruct_spaces.go -- package fillstruct @@ -575,7 +562,7 @@ func _[T any]() { + bar: 0, +} //@codeaction("}", "refactor.rewrite.fillStruct", edit=typeparams2) -- @typeparams3/typeparams.go -- -@@ -21 +21 @@ +@@ -22 +22 @@ + foo: 0, -- @typeparams4/typeparams.go -- @@ -29 +29,4 @@ diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt b/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt index 1912c92c19a..a92a895287f 100644 --- a/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt +++ b/gopls/internal/test/marker/testdata/codeaction/fill_switch.txt @@ -1,6 +1,15 @@ This test checks the behavior of the 'fill switch' code action. See fill_switch_resolve.txt for same test with resolve support. +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} -- flags -- -ignore_extra_diags diff --git a/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt index c8380a7d6d6..39a7eae7779 100644 --- a/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt +++ b/gopls/internal/test/marker/testdata/codeaction/fill_switch_resolve.txt @@ -1,17 +1,6 @@ This test checks the behavior of the 'fill switch' code action, with resolve support. See fill_switch.txt for same test without resolve support. 
--- capabilities.json -- -{ - "textDocument": { - "codeAction": { - "dataSupport": true, - "resolveSupport": { - "properties": ["edit"] - } - } - } -} -- flags -- -ignore_extra_diags diff --git a/gopls/internal/test/marker/testdata/codeaction/inline.txt b/gopls/internal/test/marker/testdata/codeaction/inline.txt index 4c2bf15c207..1871a303d2b 100644 --- a/gopls/internal/test/marker/testdata/codeaction/inline.txt +++ b/gopls/internal/test/marker/testdata/codeaction/inline.txt @@ -1,6 +1,16 @@ This is a minimal test of the refactor.inline.call code action, without resolve support. See inline_resolve.txt for same test with resolve support. +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} + -- go.mod -- module example.com/codeaction go 1.18 diff --git a/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt index c889ed8bba3..cf311838706 100644 --- a/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt +++ b/gopls/internal/test/marker/testdata/codeaction/inline_resolve.txt @@ -1,17 +1,6 @@ This is a minimal test of the refactor.inline.call code actions, with resolve support. See inline.txt for same test without resolve support. --- capabilities.json -- -{ - "textDocument": { - "codeAction": { - "dataSupport": true, - "resolveSupport": { - "properties": ["edit"] - } - } - } -} -- go.mod -- module example.com/codeaction go 1.18 diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam.txt index 8bebfc29c40..7ba21a6a876 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam.txt @@ -1,6 +1,16 @@ This test exercises the refactoring to remove unused parameters. See removeparam_resolve.txt for same test with resolve support. +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} + -- go.mod -- module unused.mod diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt index 3d92d758b13..a10251a87ee 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_resolve.txt @@ -1,17 +1,6 @@ This test exercises the refactoring to remove unused parameters, with resolve support. See removeparam.txt for same test without resolve support. --- capabilities.json -- -{ - "textDocument": { - "codeAction": { - "dataSupport": true, - "resolveSupport": { - "properties": ["edit"] - } - } - } -} -- go.mod -- module unused.mod diff --git a/gopls/internal/test/marker/testdata/completion/comment.txt b/gopls/internal/test/marker/testdata/completion/comment.txt index f66bfdab186..34ef242e2f9 100644 --- a/gopls/internal/test/marker/testdata/completion/comment.txt +++ b/gopls/internal/test/marker/testdata/completion/comment.txt @@ -79,3 +79,9 @@ func Multiline() int { //@item(multiline, "Multiline", "func() int", "func") // //@complete(" ", multiline) return 0 } + +// This test checks that gopls does not panic if the receiver is syntactically +// present but empty. 
+// +// //@complete(" ") +func () _() {} diff --git a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt index 7ba338032e9..fb7876a0492 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt @@ -74,6 +74,14 @@ func _() { }() } +// inline +func _() { + f() //@diag("f", re"Call of analyzer.f should be inlined") +} + +//go:fix inline +func f() { fmt.Println(1) } + -- cgocall/cgocall.go -- package cgocall diff --git a/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt new file mode 100644 index 00000000000..4b0f2045343 --- /dev/null +++ b/gopls/internal/test/marker/testdata/fixedbugs/issue71044.txt @@ -0,0 +1,18 @@ +This test checks that we don't crash while completing receivers that may happen +to be builtin types (due to invalid code). This crash was reported by telemetry +in golang/go#71044. + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module example.com/amap + +go 1.18 + +-- a.go -- +package amap + +import "unsafe" + +func (unsafe.Pointer) _() {} //@ rank("unsafe") diff --git a/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt b/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt new file mode 100644 index 00000000000..5c9bc21f016 --- /dev/null +++ b/gopls/internal/test/marker/testdata/highlight/highlight_printf.txt @@ -0,0 +1,62 @@ + +This test checks functionality of the printf-like directives and operands highlight. +-- flags -- +-ignore_extra_diags +-- highlights.go -- +package highlightprintf +import ( + "fmt" +) + +func BasicPrintfHighlights() { + fmt.Printf("Hello %s, you have %d new messages!", "Alice", 5) //@hiloc(normals, "%s", write),hiloc(normalarg0, "\"Alice\"", read),highlightall(normals, normalarg0) + fmt.Printf("Hello %s, you have %d new messages!", "Alice", 5) //@hiloc(normald, "%d", write),hiloc(normalargs1, "5", read),highlightall(normald, normalargs1) +} + +func ComplexPrintfHighlights() { + fmt.Printf("Hello %#3.4s, you have %-2.3d new messages!", "Alice", 5) //@hiloc(complexs, "%#3.4s", write),hiloc(complexarg0, "\"Alice\"", read),highlightall(complexs, complexarg0) + fmt.Printf("Hello %#3.4s, you have %-2.3d new messages!", "Alice", 5) //@hiloc(complexd, "%-2.3d", write),hiloc(complexarg1, "5", read),highlightall(complexd, complexarg1) +} + +func MissingDirectives() { + fmt.Printf("Hello %s, you have 5 new messages!", "Alice", 5) //@hiloc(missings, "%s", write),hiloc(missingargs0, "\"Alice\"", read),highlightall(missings, missingargs0) +} + +func TooManyDirectives() { + fmt.Printf("Hello %s, you have %d new %s %q messages!", "Alice", 5) //@hiloc(toomanys, "%s", write),hiloc(toomanyargs0, "\"Alice\"", read),highlightall(toomanys, toomanyargs0) + fmt.Printf("Hello %s, you have %d new %s %q messages!", "Alice", 5) //@hiloc(toomanyd, "%d", write),hiloc(toomanyargs1, "5", read),highlightall(toomanyd, toomanyargs1) +} + +func VerbIsPercentage() { + fmt.Printf("%4.2% %d", 6) //@hiloc(z1, "%d", write),hiloc(z2, "6", read),highlightall(z1, z2) +} + +func SpecialChars() { + fmt.Printf("Hello \n %s, you \t \n have %d new messages!", "Alice", 5) //@hiloc(specials, "%s", write),hiloc(specialargs0, "\"Alice\"", read),highlightall(specials, specialargs0) + fmt.Printf("Hello \n %s, you \t \n have %d new messages!", "Alice", 5) //@hiloc(speciald, "%d", write),hiloc(specialargs1, "5", read),highlightall(speciald, 
specialargs1) +} + +func Escaped() { + fmt.Printf("Hello %% \n %s, you \t%% \n have %d new m%%essages!", "Alice", 5) //@hiloc(escapeds, "%s", write),hiloc(escapedargs0, "\"Alice\"", read),highlightall(escapeds, escapedargs0) + fmt.Printf("Hello %% \n %s, you \t%% \n have %d new m%%essages!", "Alice", 5) //@hiloc(escapedd, "%s", write),hiloc(escapedargs1, "\"Alice\"", read),highlightall(escapedd, escapedargs1) + fmt.Printf("%d \nss \x25[2]d", 234, 123) //@hiloc(zz1, "%d", write),hiloc(zz2, "234", read),highlightall(zz1,zz2) + fmt.Printf("%d \nss \x25[2]d", 234, 123) //@hiloc(zz3, "\\x25[2]d", write),hiloc(zz4, "123", read),highlightall(zz3,zz4) +} + +func Indexed() { + fmt.Printf("%[1]d", 3) //@hiloc(i1, "%[1]d", write),hiloc(i2, "3", read),highlightall(i1, i2) + fmt.Printf("%[1]*d", 3, 6) //@hiloc(i3, "[1]*", write),hiloc(i4, "3", read),hiloc(i5, "d", write),hiloc(i6, "6", read),highlightall(i3, i4),highlightall(i5, i6) + fmt.Printf("%[2]*[1]d", 3, 4) //@hiloc(i7, "[2]*", write),hiloc(i8, "4", read),hiloc(i9, "[1]d", write),hiloc(i10, "3", read),highlightall(i7, i8),highlightall(i9, i10) + fmt.Printf("%[2]*.[1]*[3]d", 4, 5, 6) //@hiloc(i11, "[2]*", write),hiloc(i12, "5", read),hiloc(i13, ".[1]*", write),hiloc(i14, "4", read),hiloc(i15, "[3]d", write),hiloc(i16, "6", read),highlightall(i11, i12),highlightall(i13, i14),highlightall(i15, i16) +} + +func MultipleIndexed() { + fmt.Printf("%[1]d %[1].2d", 3) //@hiloc(m1, "%[1]d", write),hiloc(m2, "3", read),hiloc(m3, "%[1].2d", write),highlightall(m1, m2, m3) +} + +// This test checks that gopls doesn't crash (index out of bounds) +// when the last non-variadic argument has not yet been filled in. +func NoEffectOnUnfinishedArg() { + var s string //@hiloc(var, "s", write) + fmt.Fprintf(s) //@hiloc(firstArg, "s", read),highlightall(var, firstArg) +} diff --git a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt index 96f992f8aaa..1ddee2cfe98 100644 --- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt @@ -1,6 +1,16 @@ This test exercises basic 'stub methods' functionality. See basic_resolve.txt for the same test with resolve support. +-- capabilities.json -- +{ + "textDocument": { + "codeAction": { + "dataSupport": false, + "resolveSupport": {} + } + } +} + -- go.mod -- module example.com go 1.12 diff --git a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt index 502cc40bb74..f3e3dfefb71 100644 --- a/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt @@ -1,17 +1,6 @@ This test exercises basic 'stub methods' functionality, with resolve support. See basic.txt for the same test without resolve support.
--- capabilities.json -- -{ - "textDocument": { - "codeAction": { - "dataSupport": true, - "resolveSupport": { - "properties": ["edit"] - } - } - } -} -- go.mod -- module example.com go 1.12 diff --git a/gopls/internal/test/marker/testdata/signature/signature.txt b/gopls/internal/test/marker/testdata/signature/signature.txt index 4f4064397c6..74f53a20e64 100644 --- a/gopls/internal/test/marker/testdata/signature/signature.txt +++ b/gopls/internal/test/marker/testdata/signature/signature.txt @@ -26,8 +26,16 @@ func Foo(a string, b int) (c bool) { func Bar(float64, ...byte) { } +func FooArr(a []int) { + +} + type myStruct struct{} +type Bar struct { + A, B, C, D string +} + func (*myStruct) foo(e *json.Decoder) (*big.Int, error) { return nil, nil } @@ -114,6 +122,14 @@ func Qux() { AliasSlice() //@signature(")", "AliasSlice(a []*Alias) (b Alias)", 0) AliasMap() //@signature(")", "AliasMap(a map[*Alias]StringAlias) (b map[*Alias]StringAlias, c map[*Alias]StringAlias)", 0) OtherAliasMap() //@signature(")", "OtherAliasMap(a map[Alias]OtherAlias, b map[Alias]OtherAlias) map[Alias]OtherAlias", 0) + + var l []Foo + l = append(l, Foo{ //@signature(",", "append(slice []Type, elems ...Type) []Type", 0) + A: "hello", //@signature(",", "", 0) + B: "world", //@signature(",", "", 0) + }) + + FooArr([]int{1, 2, 3, 4, 5}) //@signature("1", "", 0) } func Hello(func()) {} diff --git a/gopls/internal/test/marker/testdata/token/builtin_constant.txt b/gopls/internal/test/marker/testdata/token/builtin_constant.txt deleted file mode 100644 index 79736d625b7..00000000000 --- a/gopls/internal/test/marker/testdata/token/builtin_constant.txt +++ /dev/null @@ -1,21 +0,0 @@ -This test checks semanticTokens on builtin constants. -(test for #70219.) - --- settings.json -- -{ - "semanticTokens": true -} - --- flags -- --ignore_extra_diags - --- default_lib_const.go -- -package p - -func _() { - a, b := false, true //@ token("false", "variable", "readonly defaultLibrary"), token("true", "variable", "readonly defaultLibrary") -} - -const ( - c = iota //@ token("iota", "variable", "readonly defaultLibrary number") -) diff --git a/gopls/internal/test/marker/testdata/token/format.txt b/gopls/internal/test/marker/testdata/token/format.txt new file mode 100644 index 00000000000..c577cc666af --- /dev/null +++ b/gopls/internal/test/marker/testdata/token/format.txt @@ -0,0 +1,26 @@ +This test checks semanticTokens for format string placeholders. 
+ +-- settings.json -- +{ + "semanticTokens": true +} + +-- flags -- +-ignore_extra_diags + +-- format.go -- +package format + +import "fmt" + +func PrintfTests() { + var i int + var x float64 + fmt.Printf("%b %d %f", 3, i, x) //@ token("%b", "string", "format"), token("%d", "string", "format"),token("%f", "string", "format"), + fmt.Printf("lit1%blit2%dlit3%flit4", 3, i, x) //@ token("%b", "string", "format"), token("%d", "string", "format"),token("%f", "string", "format"),token("lit1", "string", ""),token("lit2", "string", ""),token("lit3", "string", ""), + fmt.Printf("%% %d lit2", 3, i, x) //@ token("%d", "string", "format"),token("%%", "string", ""),token("lit2", "string", ""), + fmt.Printf("Hello %% \n %s, you \t%% \n have %d new m%%essages!", "Alice", 5) //@ token("%s", "string", "format"),token("%d", "string", "format") + fmt.Printf("%d \nss \x25[2]d", 234, 123) //@ token("%d", "string", "format"),token("\\x25[2]d", "string", "format") + fmt.Printf("start%[2]*.[1]*[3]dmiddle%send", 4, 5, 6) //@ token("%[2]*.[1]*[3]d", "string", "format"),token("start", "string", ""),token("%s", "string", "format"),token("middle", "string", ""),token("end", "string", "") +} + diff --git a/gopls/internal/test/marker/testdata/token/modifiers.txt b/gopls/internal/test/marker/testdata/token/modifiers.txt new file mode 100644 index 00000000000..86789e3b956 --- /dev/null +++ b/gopls/internal/test/marker/testdata/token/modifiers.txt @@ -0,0 +1,61 @@ +This test checks the output of semanticTokens modifiers. +(including test for #70219.) + +-- settings.json -- +{ + "semanticTokens": true +} + +-- flags -- +-ignore_extra_diags + +-- standard.go -- +package modifiers + +func _() { + a, b := false, true //@ token("false", "variable", "readonly defaultLibrary"), token("true", "variable", "readonly defaultLibrary") +} + +const ( + c = iota //@ token("iota", "variable", "readonly defaultLibrary number") +) + +-- custom.go -- +package modifiers + +type Foo struct{} + +func _() { + var array [2]string //@ token("array", "variable", "definition array") + array = [2]string{"", ""} //@ token("array", "variable", "array") + + var b bool //@ token("b", "variable", "definition bool") + b = true //@ token("b", "variable", "bool") + + var c chan string //@ token("c", "variable", "definition chan") + c = make(chan string) //@ token("c", "variable", "chan") + + type inter interface{} //@ token("inter", "type", "definition interface") + + var m map[string]string //@ token("m", "variable", "definition map") + m = make(map[string]string) //@ token("m", "variable", "map") + + var number int //@ token("number", "variable", "definition number") + number = 1 //@ token("number", "variable", "number") + + var ptr *Foo //@ token("ptr", "variable", "definition pointer") + ptr = nil //@ token("ptr", "variable", "pointer") + + var sig func(string) //@ token("sig", "variable", "definition signature") + sig = nil //@ token("sig", "variable", "signature") + + var slice []string //@ token("slice", "variable", "definition slice") + slice = nil //@ token("slice", "variable", "slice") + + var str string //@ token("str", "variable", "definition string") + str = "" //@ token("str", "variable", "string") + + var foo Foo //@ token("foo", "variable", "definition struct") + foo = Foo{} //@ token("foo", "variable", "struct") +} + diff --git a/gopls/internal/util/frob/frob.go b/gopls/internal/util/frob/frob.go index c297e2a1014..00ef7c7f95e 100644 --- a/gopls/internal/util/frob/frob.go +++ b/gopls/internal/util/frob/frob.go @@ -244,8 +244,8 @@ func (fr 
*frob) Decode(data []byte, ptr any) { panic(fmt.Sprintf("got %v, want %v", rv.Type(), fr.t)) } rd := &reader{data} - if string(rd.bytes(4)) != magic { - panic("not a frob-encoded message") + if len(data) < len(magic) || string(rd.bytes(len(magic))) != magic { + panic("not a frob-encoded message") // (likely an empty message) } fr.decode(rd, rv) if len(rd.data) > 0 { diff --git a/gopls/internal/util/moreiters/iters.go b/gopls/internal/util/moreiters/iters.go new file mode 100644 index 00000000000..e4d83ae8618 --- /dev/null +++ b/gopls/internal/util/moreiters/iters.go @@ -0,0 +1,16 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package moreiters + +import "iter" + +// First returns the first value of seq and true. +// If seq is empty, it returns the zero value of T and false. +func First[T any](seq iter.Seq[T]) (z T, ok bool) { + for t := range seq { + return t, true + } + return z, false +} diff --git a/gopls/internal/util/moremaps/maps.go b/gopls/internal/util/moremaps/maps.go index 00dd1e4210b..e25627d67b5 100644 --- a/gopls/internal/util/moremaps/maps.go +++ b/gopls/internal/util/moremaps/maps.go @@ -31,7 +31,7 @@ func KeySlice[M ~map[K]V, K comparable, V any](m M) []K { return r } -// Values returns the values of the map M, like slices.Collect(maps.Values(m)). +// ValueSlice returns the values of the map M, like slices.Collect(maps.Values(m)). func ValueSlice[M ~map[K]V, K comparable, V any](m M) []V { r := make([]V, 0, len(m)) for _, v := range m { @@ -60,7 +60,7 @@ func Sorted[M ~map[K]V, K cmp.Ordered, V any](m M) iter.Seq2[K, V] { } } -// SortedFunc returns an iterator over the entries of m in key order. +// SortedFunc returns an iterator over the entries of m in the key order determined by cmp. func SortedFunc[M ~map[K]V, K comparable, V any](m M, cmp func(x, y K) int) iter.Seq2[K, V] { // TODO(adonovan): use maps.SortedFunc if proposal #68598 is accepted. return func(yield func(K, V) bool) { diff --git a/gopls/internal/util/moreslices/slices.go b/gopls/internal/util/moreslices/slices.go index 5905e360bfa..7658cd8b536 100644 --- a/gopls/internal/util/moreslices/slices.go +++ b/gopls/internal/util/moreslices/slices.go @@ -18,3 +18,13 @@ func Remove[T comparable](slice []T, elem T) []T { } return out } + +// ConvertStrings converts a slice of type A (with underlying type string) +// to a slice of type B (with underlying type string). +func ConvertStrings[B, A ~string](input []A) []B { + result := make([]B, len(input)) + for i, v := range input { + result[i] = B(string(v)) + } + return result +} diff --git a/gopls/internal/util/morestrings/strings.go b/gopls/internal/util/morestrings/strings.go new file mode 100644 index 00000000000..5632006a40f --- /dev/null +++ b/gopls/internal/util/morestrings/strings.go @@ -0,0 +1,15 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package morestrings + +import "strings" + +// CutLast is the "last" analogue of [strings.Cut]. 
+func CutLast(s, sep string) (before, after string, ok bool) { + if i := strings.LastIndex(s, sep); i >= 0 { + return s[:i], s[i+len(sep):], true + } + return s, "", false +} diff --git a/gopls/internal/util/persistent/map.go b/gopls/internal/util/persistent/map.go index 5cb556a482b..193f98791d8 100644 --- a/gopls/internal/util/persistent/map.go +++ b/gopls/internal/util/persistent/map.go @@ -9,6 +9,7 @@ package persistent import ( "fmt" + "iter" "math/rand" "strings" "sync/atomic" @@ -57,10 +58,10 @@ func (m *Map[K, V]) String() string { var buf strings.Builder buf.WriteByte('{') var sep string - m.Range(func(k K, v V) { + for k, v := range m.All() { fmt.Fprintf(&buf, "%s%v: %v", sep, k, v) sep = ", " - }) + } buf.WriteByte('}') return buf.String() } @@ -149,29 +150,29 @@ func (pm *Map[K, V]) Clear() { pm.root = nil } -// Keys returns all keys present in the map. -func (pm *Map[K, V]) Keys() []K { - var keys []K - pm.root.forEach(func(k, _ any) { - keys = append(keys, k.(K)) - }) - return keys +// Keys returns the ascending sequence of keys present in the map. +func (pm *Map[K, V]) Keys() iter.Seq[K] { + return func(yield func(K) bool) { + pm.root.forEach(func(k, _ any) bool { + return yield(k.(K)) + }) + } } -// Range calls f sequentially in ascending key order for all entries in the map. -func (pm *Map[K, V]) Range(f func(key K, value V)) { - pm.root.forEach(func(k, v any) { - f(k.(K), v.(V)) - }) +// All returns the sequence of map entries in ascending key order. +func (pm *Map[K, V]) All() iter.Seq2[K, V] { + return func(yield func(K, V) bool) { + pm.root.forEach(func(k, v any) bool { + return yield(k.(K), v.(V)) + }) + } } -func (node *mapNode) forEach(f func(key, value any)) { - if node == nil { - return - } - node.left.forEach(f) - f(node.key, node.value.value) - node.right.forEach(f) +func (node *mapNode) forEach(yield func(key, value any) bool) bool { + return node == nil || + node.left.forEach(yield) && + yield(node.key, node.value.value) && + node.right.forEach(yield) } // Get returns the map value associated with the specified key. diff --git a/gopls/internal/util/persistent/map_test.go b/gopls/internal/util/persistent/map_test.go index effa1c1da85..88dced2a85f 100644 --- a/gopls/internal/util/persistent/map_test.go +++ b/gopls/internal/util/persistent/map_test.go @@ -240,12 +240,12 @@ func (vm *validatedMap) validate(t *testing.T) { } actualMap := make(map[int]int, len(vm.expected)) - vm.impl.Range(func(key, value int) { + for key, value := range vm.impl.All() { if other, ok := actualMap[key]; ok { t.Fatalf("key is present twice, key: %d, first value: %d, second value: %d", key, value, other) } actualMap[key] = value - }) + } assertSameMap(t, actualMap, vm.expected) } diff --git a/gopls/internal/util/persistent/set.go b/gopls/internal/util/persistent/set.go index 2d5f4edac96..e47d046fb48 100644 --- a/gopls/internal/util/persistent/set.go +++ b/gopls/internal/util/persistent/set.go @@ -4,7 +4,11 @@ package persistent -import "golang.org/x/tools/gopls/internal/util/constraints" +import ( + "iter" + + "golang.org/x/tools/gopls/internal/util/constraints" +) // Set is a collection of elements of type K. // @@ -43,12 +47,14 @@ func (s *Set[K]) Contains(key K) bool { return ok } -// Range calls f sequentially in ascending key order for all entries in the set. -func (s *Set[K]) Range(f func(key K)) { - if s.impl != nil { - s.impl.Range(func(key K, _ struct{}) { - f(key) - }) +// All returns the sequence of set elements in ascending order. 
+func (s *Set[K]) All() iter.Seq[K] { + return func(yield func(K) bool) { + if s.impl != nil { + s.impl.root.forEach(func(k, _ any) bool { + return yield(k.(K)) + }) + } } } diff --git a/gopls/internal/util/persistent/set_test.go b/gopls/internal/util/persistent/set_test.go index 31911b451b3..192b1c74121 100644 --- a/gopls/internal/util/persistent/set_test.go +++ b/gopls/internal/util/persistent/set_test.go @@ -111,11 +111,11 @@ func diff[K constraints.Ordered](got *persistent.Set[K], want []K) string { wantSet[w] = struct{}{} } var diff []string - got.Range(func(key K) { + for key := range got.All() { if _, ok := wantSet[key]; !ok { diff = append(diff, fmt.Sprintf("+%v", key)) } - }) + } for key := range wantSet { if !got.Contains(key) { diff = append(diff, fmt.Sprintf("-%v", key)) diff --git a/gopls/internal/vulncheck/vulntest/report.go b/gopls/internal/vulncheck/vulntest/report.go index 7dbebca6d6b..6aa87221866 100644 --- a/gopls/internal/vulncheck/vulntest/report.go +++ b/gopls/internal/vulncheck/vulntest/report.go @@ -104,7 +104,7 @@ type Package struct { DerivedSymbols []string `yaml:"derived_symbols,omitempty"` } -// Version is an SemVer 2.0.0 semantic version with no leading "v" prefix, +// Version is a SemVer 2.0.0 semantic version with no leading "v" prefix, // as used by OSV. type Version string diff --git a/internal/analysisinternal/addimport_test.go b/internal/analysisinternal/addimport_test.go index f361bde82f8..145d5861b8f 100644 --- a/internal/analysisinternal/addimport_test.go +++ b/internal/analysisinternal/addimport_test.go @@ -183,6 +183,42 @@ import foo "encoding/json" func _() { foo +}`, + }, + { + descr: descr("dot import unshadowed"), + src: `package a + +import . "fmt" + +func _() { + «. fmt» +}`, + want: `package a + +import . "fmt" + +func _() { + . +}`, + }, + { + descr: descr("dot import shadowed"), + src: `package a + +import . "fmt" + +func _(Print fmt.Stringer) { + «fmt fmt» +}`, + want: `package a + +import "fmt" + +import . "fmt" + +func _(Print fmt.Stringer) { + fmt }`, }, } { @@ -218,7 +254,8 @@ func _() { conf.Check(f.Name.Name, fset, []*ast.File{f}, info) // add import - name, edits := analysisinternal.AddImport(info, f, pos, path, name) + // The "Print" argument is only relevant for dot-import tests. + name, prefix, edits := analysisinternal.AddImport(info, f, name, path, "Print", pos) var edit analysis.TextEdit switch len(edits) { @@ -229,6 +266,15 @@ func _() { t.Fatalf("expected at most one edit, got %d", len(edits)) } + // prefix is a simple function of name. + wantPrefix := name + "." + if name == "." { + wantPrefix = "" + } + if prefix != wantPrefix { + t.Errorf("got prefix %q, want %q", prefix, wantPrefix) + } + // apply patch start := fset.Position(edit.Pos) end := fset.Position(edit.End) diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 58615232ff9..abf708111bf 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -8,15 +8,19 @@ package analysisinternal import ( "bytes" + "cmp" "fmt" "go/ast" + "go/printer" "go/scanner" "go/token" "go/types" - "os" pathpkg "path" + "slices" + "strings" "golang.org/x/tools/go/analysis" + "golang.org/x/tools/internal/typesinternal" ) func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { @@ -174,20 +178,25 @@ func equivalentTypes(want, got types.Type) bool { return types.AssignableTo(want, got) } -// MakeReadFile returns a simple implementation of the Pass.ReadFile function. 
-func MakeReadFile(pass *analysis.Pass) func(filename string) ([]byte, error) { +// A ReadFileFunc is a function that returns the +// contents of a file, such as [os.ReadFile]. +type ReadFileFunc = func(filename string) ([]byte, error) + +// CheckedReadFile returns a wrapper around a Pass.ReadFile +// function that performs the appropriate checks. +func CheckedReadFile(pass *analysis.Pass, readFile ReadFileFunc) ReadFileFunc { return func(filename string) ([]byte, error) { if err := CheckReadable(pass, filename); err != nil { return nil, err } - return os.ReadFile(filename) + return readFile(filename) } } // CheckReadable enforces the access policy defined by the ReadFile field of [analysis.Pass]. func CheckReadable(pass *analysis.Pass, filename string) error { - if slicesContains(pass.OtherFiles, filename) || - slicesContains(pass.IgnoredFiles, filename) { + if slices.Contains(pass.OtherFiles, filename) || + slices.Contains(pass.IgnoredFiles, filename) { return nil } for _, f := range pass.Files { @@ -198,24 +207,21 @@ func CheckReadable(pass *analysis.Pass, filename string) error { return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename) } -// TODO(adonovan): use go1.21 slices.Contains. -func slicesContains[S ~[]E, E comparable](slice S, x E) bool { - for _, elem := range slice { - if elem == x { - return true - } - } - return false -} - // AddImport checks whether this file already imports pkgpath and // that import is in scope at pos. If so, it returns the name under // which it was imported and a zero edit. Otherwise, it adds a new // import of pkgpath, using a name derived from the preferred name, -// and returns the chosen name along with the edit for the new import. +// and returns the chosen name, a prefix to be concatenated with member +// to form a qualified name, and the edit for the new import. +// +// In the special case that pkgpath is dot-imported then member, the +// identifier for which the import is being added, is consulted. If +// member is not shadowed at pos, AddImport returns (".", "", nil). +// (AddImport accepts the caller's implicit claim that the imported +// package declares member.) // // It does not mutate its arguments. -func AddImport(info *types.Info, file *ast.File, pos token.Pos, pkgpath, preferredName string) (name string, newImport []analysis.TextEdit) { +func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (name, prefix string, newImport []analysis.TextEdit) { // Find innermost enclosing lexical block. scope := info.Scopes[file].Innermost(pos) if scope == nil { @@ -225,10 +231,16 @@ func AddImport(info *types.Info, file *ast.File, pos token.Pos, pkgpath, preferr // Is there an existing import of this package? // If so, are we in its scope? (not shadowed) for _, spec := range file.Imports { - pkgname, ok := importedPkgName(info, spec) - if ok && pkgname.Imported().Path() == pkgpath { - if _, obj := scope.LookupParent(pkgname.Name(), pos); obj == pkgname { - return pkgname.Name(), nil + pkgname := info.PkgNameOf(spec) + if pkgname != nil && pkgname.Imported().Path() == pkgpath { + name = pkgname.Name() + if name == "." { + // The scope of ident must be the file scope.
+ if s, _ := scope.LookupParent(member, pos); s == info.Scopes[file] { + return name, "", nil + } + } else if _, obj := scope.LookupParent(name, pos); obj == pkgname { + return name, name + ".", nil } } } @@ -266,22 +278,174 @@ func AddImport(info *types.Info, file *ast.File, pos token.Pos, pkgpath, preferr before = decl0.Doc } } - return newName, []analysis.TextEdit{{ + return newName, newName + ".", []analysis.TextEdit{{ Pos: before.Pos(), End: before.Pos(), NewText: []byte(newText), }} } -// importedPkgName returns the PkgName object declared by an ImportSpec. -// TODO(adonovan): use go1.22's Info.PkgNameOf. -func importedPkgName(info *types.Info, imp *ast.ImportSpec) (*types.PkgName, bool) { - var obj types.Object - if imp.Name != nil { - obj = info.Defs[imp.Name] - } else { - obj = info.Implicits[imp] +// Format returns a string representation of the expression e. +func Format(fset *token.FileSet, e ast.Expr) string { + var buf strings.Builder + printer.Fprint(&buf, fset, e) // ignore errors + return buf.String() +} + +// Imports returns true if path is imported by pkg. +func Imports(pkg *types.Package, path string) bool { + for _, imp := range pkg.Imports() { + if imp.Path() == path { + return true + } + } + return false +} + +// IsTypeNamed reports whether t is (or is an alias for) a +// package-level defined type with the given package path and one of +// the given names. It returns false if t is nil. +// +// This function avoids allocating the concatenation of "pkg.Name", +// which is important for the performance of syntax matching. +func IsTypeNamed(t types.Type, pkgPath string, names ...string) bool { + if named, ok := types.Unalias(t).(*types.Named); ok { + tname := named.Obj() + return tname != nil && + typesinternal.IsPackageLevel(tname) && + tname.Pkg().Path() == pkgPath && + slices.Contains(names, tname.Name()) + } + return false +} + +// IsPointerToNamed reports whether t is (or is an alias for) a pointer to a +// package-level defined type with the given package path and one of the given +// names. It returns false if t is not a pointer type. +func IsPointerToNamed(t types.Type, pkgPath string, names ...string) bool { + r := typesinternal.Unpointer(t) + if r == t { + return false + } + return IsTypeNamed(r, pkgPath, names...) +} + +// IsFunctionNamed reports whether obj is a package-level function +// defined in the given package and has one of the given names. +// It returns false if obj is nil. +// +// This function avoids allocating the concatenation of "pkg.Name", +// which is important for the performance of syntax matching. +func IsFunctionNamed(obj types.Object, pkgPath string, names ...string) bool { + f, ok := obj.(*types.Func) + return ok && + typesinternal.IsPackageLevel(obj) && + f.Pkg().Path() == pkgPath && + f.Type().(*types.Signature).Recv() == nil && + slices.Contains(names, f.Name()) +} + +// IsMethodNamed reports whether obj is a method defined on a +// package-level type with the given package and type name, and has +// one of the given names. It returns false if obj is nil. +// +// This function avoids allocating the concatenation of "pkg.TypeName.Name", +// which is important for the performance of syntax matching. 
+func IsMethodNamed(obj types.Object, pkgPath string, typeName string, names ...string) bool { + if fn, ok := obj.(*types.Func); ok { + if recv := fn.Type().(*types.Signature).Recv(); recv != nil { + _, T := typesinternal.ReceiverNamed(recv) + return T != nil && + IsTypeNamed(T, pkgPath, typeName) && + slices.Contains(names, fn.Name()) + } } - pkgname, ok := obj.(*types.PkgName) - return pkgname, ok + return false +} + +// ValidateFixes validates the set of fixes for a single diagnostic. +// Any error indicates a bug in the originating analyzer. +// +// It updates fixes so that fixes[*].End.IsValid(). +// +// It may be used as part of an analysis driver implementation. +func ValidateFixes(fset *token.FileSet, a *analysis.Analyzer, fixes []analysis.SuggestedFix) error { + fixMessages := make(map[string]bool) + for i := range fixes { + fix := &fixes[i] + if fixMessages[fix.Message] { + return fmt.Errorf("analyzer %q suggests two fixes with same Message (%s)", a.Name, fix.Message) + } + fixMessages[fix.Message] = true + if err := validateFix(fset, fix); err != nil { + return fmt.Errorf("analyzer %q suggests invalid fix (%s): %v", a.Name, fix.Message, err) + } + } + return nil +} + +// validateFix validates a single fix. +// Any error indicates a bug in the originating analyzer. +// +// It updates fix so that fix.End.IsValid(). +func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error { + + // Stably sort edits by Pos. This ordering puts insertions + // (end = start) before deletions (end > start) at the same + // point, but uses a stable sort to preserve the order of + // multiple insertions at the same point. + slices.SortStableFunc(fix.TextEdits, func(x, y analysis.TextEdit) int { + if sign := cmp.Compare(x.Pos, y.Pos); sign != 0 { + return sign + } + return cmp.Compare(x.End, y.End) + }) + + var prev *analysis.TextEdit + for i := range fix.TextEdits { + edit := &fix.TextEdits[i] + + // Validate edit individually. + start := edit.Pos + file := fset.File(start) + if file == nil { + return fmt.Errorf("missing file info for pos (%v)", edit.Pos) + } + if end := edit.End; end.IsValid() { + if end < start { + return fmt.Errorf("pos (%v) > end (%v)", edit.Pos, edit.End) + } + endFile := fset.File(end) + if endFile == nil { + return fmt.Errorf("malformed end position %v", end) + } + if endFile != file { + return fmt.Errorf("edit spans files %v and %v", file.Name(), endFile.Name()) + } + } else { + edit.End = start // update the SuggestedFix + } + if eof := token.Pos(file.Base() + file.Size()); edit.End > eof { + return fmt.Errorf("end is (%v) beyond end of file (%v)", edit.End, eof) + } + + // Validate the sequence of edits: + // properly ordered, no overlapping deletions + if prev != nil && edit.Pos < prev.End { + xpos := fset.Position(prev.Pos) + xend := fset.Position(prev.End) + ypos := fset.Position(edit.Pos) + yend := fset.Position(edit.End) + return fmt.Errorf("overlapping edits to %s (%d:%d-%d:%d and %d:%d-%d:%d)", + xpos.Filename, + xpos.Line, xpos.Column, + xend.Line, xend.Column, + ypos.Line, ypos.Column, + yend.Line, yend.Column, + ) + } + prev = edit + } + + return nil } diff --git a/internal/apidiff/README.md b/internal/apidiff/README.md deleted file mode 100644 index 3d9576c2866..00000000000 --- a/internal/apidiff/README.md +++ /dev/null @@ -1,624 +0,0 @@ -# Checking Go Package API Compatibility - -The `apidiff` tool in this directory determines whether two versions of the same -package are compatible. 
The goal is to help the developer make an informed -choice of semantic version after they have changed the code of their module. - -`apidiff` reports two kinds of changes: incompatible ones, which require -incrementing the major part of the semantic version, and compatible ones, which -require a minor version increment. If no API changes are reported but there are -code changes that could affect client code, then the patch version should -be incremented. - -Because `apidiff` ignores package import paths, it may be used to display API -differences between any two packages, not just different versions of the same -package. - -The current version of `apidiff` compares only packages, not modules. - - -## Compatibility Desiderata - -Any tool that checks compatibility can offer only an approximation. No tool can -detect behavioral changes; and even if it could, whether a behavioral change is -a breaking change or not depends on many factors, such as whether it closes a -security hole or fixes a bug. Even a change that causes some code to fail to -compile may not be considered a breaking change by the developers or their -users. It may only affect code marked as experimental or unstable, for -example, or the break may only manifest in unlikely cases. - -For a tool to be useful, its notion of compatibility must be relaxed enough to -allow reasonable changes, like adding a field to a struct, but strict enough to -catch significant breaking changes. A tool that is too lax will miss important -incompatibilities, and users will stop trusting it; one that is too strict may -generate so much noise that users will ignore it. - -To a first approximation, this tool reports a change as incompatible if it could -cause client code to stop compiling. But `apidiff` ignores five ways in which -code may fail to compile after a change. Three of them are mentioned in the -[Go 1 Compatibility Guarantee](https://golang.org/doc/go1compat). - -### Unkeyed Struct Literals - -Code that uses an unkeyed struct literal would fail to compile if a field was -added to the struct, making any such addition an incompatible change. An example: - -``` -// old -type Point struct { X, Y int } - -// new -type Point struct { X, Y, Z int } - -// client -p := pkg.Point{1, 2} // fails in new because there are more fields than expressions -``` -Here and below, we provide three snippets: the code in the old version of the -package, the code in the new version, and the code written in a client of the package, -which refers to it by the name `pkg`. The client code compiles against the old -code but not the new. - -### Embedding and Shadowing - -Adding an exported field to a struct can break code that embeds that struct, -because the newly added field may conflict with an identically named field -at the same struct depth. A selector referring to the latter would become -ambiguous and thus erroneous. - - -``` -// old -type Point struct { X, Y int } - -// new -type Point struct { X, Y, Z int } - -// client -type z struct { Z int } - -var v struct { - pkg.Point - z -} - -_ = v.Z // fails in new -``` -In the new version, the last line fails to compile because there are two embedded `Z` -fields at the same depth, one from `z` and one from `pkg.Point`. - - -### Using an Identical Type Externally - -If it is possible for client code to write a type expression representing the -underlying type of a defined type in a package, then external code can use it in -assignments involving the package type, making any change to that type incompatible. 
-``` -// old -type Point struct { X, Y int } - -// new -type Point struct { X, Y, Z int } - -// client -var p struct { X, Y int } = pkg.Point{} // fails in new because of Point's extra field -``` -Here, the external code could have used the provided name `Point`, but chose not -to. I'll have more to say about this and related examples later. - -### unsafe.Sizeof and Friends - -Since `unsafe.Sizeof`, `unsafe.Offsetof` and `unsafe.Alignof` are constant -expressions, they can be used in an array type literal: - -``` -// old -type S struct{ X int } - -// new -type S struct{ X, y int } - -// client -var a [unsafe.Sizeof(pkg.S{})]int = [8]int{} // fails in new because S's size is not 8 -``` -Use of these operations could make many changes to a type potentially incompatible. - - -### Type Switches - -A package change that merges two different types (with same underlying type) -into a single new type may break type switches in clients that refer to both -original types: - -``` -// old -type T1 int -type T2 int - -// new -type T1 int -type T2 = T1 - -// client -switch x.(type) { -case T1: -case T2: -} // fails with new because two cases have the same type -``` -This sort of incompatibility is sufficiently esoteric to ignore; the tool allows -merging types. - -## First Attempt at a Definition - -Our first attempt at defining compatibility captures the idea that all the -exported names in the old package must have compatible equivalents in the new -package. - -A new package is compatible with an old one if and only if: -- For every exported package-level name in the old package, the same name is - declared in the new at package level, and -- the names denote the same kind of object (e.g. both are variables), and -- the types of the objects are compatible. - -We will work out the details (and make some corrections) below, but it is clear -already that we will need to determine what makes two types compatible. And -whatever the definition of type compatibility, it's certainly true that if two -types are the same, they are compatible. So we will need to decide what makes an -old and new type the same. We will call this sameness relation _correspondence_. - -## Type Correspondence - -Go already has a definition of when two types are the same: -[type identity](https://golang.org/ref/spec#Type_identity). -But identity isn't adequate for our purpose: it says that two defined -types are identical if they arise from the same definition, but it's unclear -what "same" means when talking about two different packages (or two versions of -a single package). - -The obvious change to the definition of identity is to require that old and new -[defined types](https://golang.org/ref/spec#Type_definitions) -have the same name instead. But that doesn't work either, for two -reasons. First, type aliases can equate two defined types with different names: - -``` -// old -type E int - -// new -type t int -type E = t -``` -Second, an unexported type can be renamed: - -``` -// old -type u1 int -var V u1 - -// new -type u2 int -var V u2 -``` -Here, even though `u1` and `u2` are unexported, their exported fields and -methods are visible to clients, so they are part of the API. But since the name -`u1` is not visible to clients, it can be changed compatibly. We say that `u1` -and `u2` are _exposed_: a type is exposed if a client package can declare variables of that type. 
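A client-side sketch, extending the example just above with an invented method `M` (and following this document's old/new/client convention), makes the point concrete:

```
// old
type u1 int
func (u1) M() {}
var V u1

// new
type u2 int
func (u2) M() {}
var V u2

// client
x := pkg.V // x has the unexported, but exposed, type (u1 in old, u2 in new)
x.M()      // compiles against both versions: the exported method set is unchanged
```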
- -We will say that an old defined type _corresponds_ to a new one if they have the -same name, or one can be renamed to the other without otherwise changing the -API. In the first example above, old `E` and new `t` correspond. In the second, -old `u1` and new `u2` correspond. - -Two or more old defined types can correspond to a single new type: we consider -"merging" two types into one to be a compatible change. As mentioned above, -code that uses both names in a type switch will fail, but we deliberately ignore -this case. However, a single old type can correspond to only one new type. - -So far, we've explained what correspondence means for defined types. To extend -the definition to all types, we parallel the language's definition of type -identity. So, for instance, an old and a new slice type correspond if their -element types correspond. - -## Definition of Compatibility - -We can now present the definition of compatibility used by `apidiff`. - -### Package Compatibility - -> A new package is compatible with an old one if: ->1. Each exported name in the old package's scope also appears in the new ->package's scope, and the object (constant, variable, function or type) denoted ->by that name in the old package is compatible with the object denoted by the ->name in the new package, and ->2. For every exposed type that implements an exposed interface in the old package, -> its corresponding type should implement the corresponding interface in the new package. -> ->Otherwise the packages are incompatible. - -As an aside, the tool also finds exported names in the new package that are not -exported in the old, and marks them as compatible changes. - -Clause 2 is discussed further in "Whole-Package Compatibility." - -### Object Compatibility - -This section provides compatibility rules for constants, variables, functions -and types. - -#### Constants - ->A new exported constant is compatible with an old one of the same name if and only if ->1. Their types correspond, and ->2. Their values are identical. - -It is tempting to allow changing a typed constant to an untyped one. That may -seem harmless, but it can break code like this: - -``` -// old -const C int64 = 1 - -// new -const C = 1 - -// client -var x = C // old type is int64, new is int -var y int64 = x // fails with new: different types in assignment -``` - -A change to the value of a constant can break compatibility if the value is used -in an array type: - -``` -// old -const C = 1 - -// new -const C = 2 - -// client -var a [C]int = [1]int{} // fails with new because [2]int and [1]int are different types -``` -Changes to constant values are rare, and determining whether they are compatible -or not is better left to the user, so the tool reports them. - -#### Variables - ->A new exported variable is compatible with an old one of the same name if and ->only if their types correspond. - -Correspondence doesn't look past names, so this rule does not prevent adding a -field to `MyStruct` if the package declares `var V MyStruct`. It does, however, mean that - -``` -var V struct { X int } -``` -is incompatible with -``` -var V struct { X, Y int } -``` -I discuss this at length below in the section "Compatibility, Types and Names." - -#### Functions - ->A new exported function or variable is compatible with an old function of the ->same name if and only if their types (signatures) correspond. 
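The phrase "function or variable" is deliberate. For example (an illustrative sketch in the old/new/client style):

```
// old
func F(x int) int { return x }

// new
var F = func(x int) int { return x } // same signature

// client
var g func(int) int = pkg.F // compiles against both versions
_ = pkg.F(1)                // call sites keep compiling too
```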
- -This rule captures the fact that, although many signature changes are compatible -for all call sites, none are compatible for assignment: - -``` -var v func(int) = pkg.F -``` -Here, `F` must be of type `func(int)` and not, for instance, `func(...int)` or `func(interface{})`. - -Note that the rule permits changing a function to a variable. This is a common -practice, usually done for test stubbing, and cannot break any code at compile -time. - -#### Exported Types - -> A new exported type is compatible with an old one if and only if their -> names are the same and their types correspond. - -This rule seems far too strict. But, ignoring aliases for the moment, it demands only -that the old and new _defined_ types correspond. Consider: -``` -// old -type T struct { X int } - -// new -type T struct { X, Y int } -``` -The addition of `Y` is a compatible change, because this rule does not require -that the struct literals have to correspond, only that the defined types -denoted by `T` must correspond. (Remember that correspondence stops at type -names.) - -If one type is an alias that refers to the corresponding defined type, the -situation is the same: - -``` -// old -type T struct { X int } - -// new -type u struct { X, Y int } -type T = u -``` -Here, the only requirement is that old `T` corresponds to new `u`, not that the -struct types correspond. (We can't tell from this snippet that the old `T` and -the new `u` do correspond; that depends on whether `u` replaces `T` throughout -the API.) - -However, the following change is incompatible, because the names do not -denote corresponding types: - -``` -// old -type T = struct { X int } - -// new -type T = struct { X, Y int } -``` -### Type Literal Compatibility - -Only five kinds of types can differ compatibly: defined types, structs, -interfaces, channels and numeric types. We only consider the compatibility of -the last four when they are the underlying type of a defined type. See -"Compatibility, Types and Names" for a rationale. - -We justify the compatibility rules by enumerating all the ways a type -can be used, and by showing that the allowed changes cannot break any code that -uses values of the type in those ways. - -Values of all types can be used in assignments (including argument passing and -function return), but we do not require that old and new types are assignment -compatible. That is because we assume that the old and new packages are never -used together: any given binary will link in either the old package or the new. -So in describing how a type can be used in the sections below, we omit -assignment. - -Any type can also be used in a type assertion or conversion. The changes we allow -below may affect the run-time behavior of these operations, but they cannot affect -whether they compile. The only such breaking change would be to change -the type `T` in an assertion `x.T` so that it no longer implements the interface -type of `x`; but the rules for interfaces below disallow that. - -> A new type is compatible with an old one if and only if they correspond, or -> one of the cases below applies. - -#### Defined Types - -Other than assignment, the only ways to use a defined type are to access its -methods, or to make use of the properties of its underlying type. Rule 2 below -covers the latter, and rules 3 and 4 cover the former. - -> A new defined type is compatible with an old one if and only if all of the -> following hold: ->1. They correspond. ->2. Their underlying types are compatible. ->3. 
The new exported value method set is a superset of the old. ->4. The new exported pointer method set is a superset of the old. - -An exported method set is a method set with all unexported methods removed. -When comparing methods of a method set, we require identical names and -corresponding signatures. - -Removing an exported method is clearly a breaking change. But removing an -unexported one (or changing its signature) can be breaking as well, if it -results in the type no longer implementing an interface. See "Whole-Package -Compatibility," below. - -#### Channels - -> A new channel type is compatible with an old one if -> 1. The element types correspond, and -> 2. Either the directions are the same, or the new type has no direction. - -Other than assignment, the only ways to use values of a channel type are to send -and receive on them, to close them, and to use them as map keys. Changes to a -channel type cannot cause code that closes a channel or uses it as a map key to -fail to compile, so we need not consider those operations. - -Rule 1 ensures that any operations on the values sent or received will compile. -Rule 2 captures the fact that any program that compiles with a directed channel -must use either only sends, or only receives, so allowing the other operation -by removing the channel direction cannot break any code. - - -#### Interfaces - -> A new interface is compatible with an old one if and only if: -> 1. The old interface does not have an unexported method, and it corresponds -> to the new interfaces (i.e. they have the same method set), or -> 2. The old interface has an unexported method and the new exported method set is a -> superset of the old. - -Other than assignment, the only ways to use an interface are to implement it, -embed it, or call one of its methods. (Interface values can also be used as map -keys, but that cannot cause a compile-time error.) - -Certainly, removing an exported method from an interface could break a client -call, so neither rule allows it. - -Rule 1 also disallows adding a method to an interface without an existing unexported -method. Such an interface can be implemented in client code. If adding a method -were allowed, a type that implements the old interface could fail to implement -the new one: - -``` -type I interface { M1() } // old -type I interface { M1(); M2() } // new - -// client -type t struct{} -func (t) M1() {} -var i pkg.I = t{} // fails with new, because t lacks M2 -``` - -Rule 2 is based on the observation that if an interface has an unexported -method, the only way a client can implement it is to embed it. -Adding a method is compatible in this case, because the embedding struct will -continue to implement the interface. Adding a method also cannot break any call -sites, since no program that compiles could have any such call sites. - -#### Structs - -> A new struct is compatible with an old one if all of the following hold: -> 1. The new set of top-level exported fields is a superset of the old. -> 2. The new set of _selectable_ exported fields is a superset of the old. -> 3. If the old struct is comparable, so is the new one. - -The set of selectable exported fields is the set of exported fields `F` -such that `x.F` is a valid selector expression for a value `x` of the struct -type. `F` may be at the top level of the struct, or it may be a field of an -embedded struct. - -Two fields are the same if they have the same name and corresponding types. 
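For example, the following addition satisfies all three clauses (an illustrative sketch in the old/new/client style):

```
// old
type T struct { X int }

// new
type T struct { X, Y int }

// client
t := pkg.T{X: 1}            // clause 1: keyed struct literals still compile
_ = t.X                     // clause 2: X is still a selectable exported field
_ = map[pkg.T]bool{t: true} // clause 3: T is still comparable
```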
- -Other than assignment, there are only four ways to use a struct: write a struct -literal, select a field, use a value of the struct as a map key, or compare two -values for equality. The first clause ensures that struct literals compile; the -second, that selections compile; and the third, that equality expressions and -map index expressions compile. - -#### Numeric Types - -> A new numeric type is compatible with an old one if and only if they are -> both unsigned integers, both signed integers, both floats or both complex -> types, and the new one is at least as large as the old on both 32-bit and -> 64-bit architectures. - -Other than in assignments, numeric types appear in arithmetic and comparison -expressions. Since all arithmetic operations but shifts (see below) require that -operand types be identical, and by assumption the old and new types underly -defined types (see "Compatibility, Types and Names," below), there is no way for -client code to write an arithmetic expression that compiles with operands of the -old type but not the new. - -Numeric types can also appear in type switches and type assertions. Again, since -the old and new types underly defined types, type switches and type assertions -that compiled using the old defined type will continue to compile with the new -defined type. - -Going from an unsigned to a signed integer type is an incompatible change for -the sole reason that only an unsigned type can appear as the right operand of a -shift. If this rule is relaxed, then changes from an unsigned type to a larger -signed type would be compatible. See [this -issue](https://github.com/golang/go/issues/19113). - -Only integer types can be used in bitwise and shift operations, and for indexing -slices and arrays. That is why switching from an integer to a floating-point -type--even one that can represent all values of the integer type--is an -incompatible change. - - -Conversions from floating-point to complex types or vice versa are not permitted -(the predeclared functions real, imag, and complex must be used instead). To -prevent valid floating-point or complex conversions from becoming invalid, -changing a floating-point type to a complex type or vice versa is considered an -incompatible change. - -Although conversions between any two integer types are valid, assigning a -constant value to a variable of integer type that is too small to represent the -constant is not permitted. That is why the only compatible changes are to -a new type whose values are a superset of the old. The requirement that the new -set of values must include the old on both 32-bit and 64-bit machines allows -conversions from `int32` to `int` and from `int` to `int64`, but not the other -direction; and similarly for `uint`. - -Changing a type to or from `uintptr` is considered an incompatible change. Since -its size is not specified, there is no way to know whether the new type's values -are a superset of the old type's. - -## Whole-Package Compatibility - -Some changes that are compatible for a single type are not compatible when the -package is considered as a whole. For example, if you remove an unexported -method on a defined type, it may no longer implement an interface of the -package. 
This can break client code: - -``` -// old -type T int -func (T) m() {} -type I interface { m() } - -// new -type T int // no method m anymore - -// client -var i pkg.I = pkg.T{} // fails with new because T lacks m -``` - -Similarly, adding a method to an interface can cause defined types -in the package to stop implementing it. - -The second clause in the definition for package compatibility handles these -cases. To repeat: -> 2. For every exposed type that implements an exposed interface in the old package, -> its corresponding type should implement the corresponding interface in the new package. -Recall that a type is exposed if it is part of the package's API, even if it is -unexported. - -Other incompatibilities that involve more than one type in the package can arise -whenever two types with identical underlying types exist in the old or new -package. Here, a change "splits" an identical underlying type into two, breaking -conversions: - -``` -// old -type B struct { X int } -type C struct { X int } - -// new -type B struct { X int } -type C struct { X, Y int } - -// client -var b B -_ = C(b) // fails with new: cannot convert B to C -``` -Finally, changes that are compatible for the package in which they occur can -break downstream packages. That can happen even if they involve unexported -methods, thanks to embedding. - -The definitions given here don't account for these sorts of problems. - - -## Compatibility, Types and Names - -The above definitions state that the only types that can differ compatibly are -defined types and the types that underly them. Changes to other type literals -are considered incompatible. For instance, it is considered an incompatible -change to add a field to the struct in this variable declaration: - -``` -var V struct { X int } -``` -or this alias definition: -``` -type T = struct { X int } -``` - -We make this choice to keep the definition of compatibility (relatively) simple. -A more precise definition could, for instance, distinguish between - -``` -func F(struct { X int }) -``` -where any changes to the struct are incompatible, and - -``` -func F(struct { X, u int }) -``` -where adding a field is compatible (since clients cannot write the signature, -and thus cannot assign `F` to a variable of the signature type). The definition -should then also allow other function signature changes that only require -call-site compatibility, like - -``` -func F(struct { X, u int }, ...int) -``` -The result would be a much more complex definition with little benefit, since -the examples in this section rarely arise in practice. diff --git a/internal/apidiff/apidiff.go b/internal/apidiff/apidiff.go deleted file mode 100644 index a37d5daca38..00000000000 --- a/internal/apidiff/apidiff.go +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// TODO: test swap corresponding types (e.g. u1 <-> u2 and u2 <-> u1) -// TODO: test exported alias refers to something in another package -- does correspondence work then? -// TODO: CODE COVERAGE -// TODO: note that we may miss correspondences because we bail early when we compare a signature (e.g. when lengths differ; we could do up to the shorter) -// TODO: if you add an unexported method to an exposed interface, you have to check that -// every exposed type that previously implemented the interface still does. 
Otherwise -// an external assignment of the exposed type to the interface type could fail. -// TODO: check constant values: large values aren't representable by some types. -// TODO: Document all the incompatibilities we don't check for. - -package apidiff - -import ( - "fmt" - "go/constant" - "go/token" - "go/types" -) - -// Changes reports on the differences between the APIs of the old and new packages. -// It classifies each difference as either compatible or incompatible (breaking.) For -// a detailed discussion of what constitutes an incompatible change, see the package -// documentation. -func Changes(old, new *types.Package) Report { - d := newDiffer(old, new) - d.checkPackage() - r := Report{} - for _, m := range d.incompatibles.collect() { - r.Changes = append(r.Changes, Change{Message: m, Compatible: false}) - } - for _, m := range d.compatibles.collect() { - r.Changes = append(r.Changes, Change{Message: m, Compatible: true}) - } - return r -} - -type differ struct { - old, new *types.Package - // Correspondences between named types. - // Even though it is the named types (*types.Named) that correspond, we use - // *types.TypeName as a map key because they are canonical. - // The values can be either named types or basic types. - correspondMap map[*types.TypeName]types.Type - - // Messages. - incompatibles messageSet - compatibles messageSet -} - -func newDiffer(old, new *types.Package) *differ { - return &differ{ - old: old, - new: new, - correspondMap: map[*types.TypeName]types.Type{}, - incompatibles: messageSet{}, - compatibles: messageSet{}, - } -} - -func (d *differ) incompatible(obj types.Object, part, format string, args ...interface{}) { - addMessage(d.incompatibles, obj, part, format, args) -} - -func (d *differ) compatible(obj types.Object, part, format string, args ...interface{}) { - addMessage(d.compatibles, obj, part, format, args) -} - -func addMessage(ms messageSet, obj types.Object, part, format string, args []interface{}) { - ms.add(obj, part, fmt.Sprintf(format, args...)) -} - -func (d *differ) checkPackage() { - // Old changes. - for _, name := range d.old.Scope().Names() { - oldobj := d.old.Scope().Lookup(name) - if !oldobj.Exported() { - continue - } - newobj := d.new.Scope().Lookup(name) - if newobj == nil { - d.incompatible(oldobj, "", "removed") - continue - } - d.checkObjects(oldobj, newobj) - } - // New additions. - for _, name := range d.new.Scope().Names() { - newobj := d.new.Scope().Lookup(name) - if newobj.Exported() && d.old.Scope().Lookup(name) == nil { - d.compatible(newobj, "", "added") - } - } - - // Whole-package satisfaction. - // For every old exposed interface oIface and its corresponding new interface nIface... - for otn1, nt1 := range d.correspondMap { - oIface, ok := otn1.Type().Underlying().(*types.Interface) - if !ok { - continue - } - nIface, ok := nt1.Underlying().(*types.Interface) - if !ok { - // If nt1 isn't an interface but otn1 is, then that's an incompatibility that - // we've already noticed, so there's no need to do anything here. - continue - } - // For every old type that implements oIface, its corresponding new type must implement - // nIface. 
- for otn2, nt2 := range d.correspondMap { - if otn1 == otn2 { - continue - } - if types.Implements(otn2.Type(), oIface) && !types.Implements(nt2, nIface) { - d.incompatible(otn2, "", "no longer implements %s", objectString(otn1)) - } - } - } -} - -func (d *differ) checkObjects(old, new types.Object) { - switch old := old.(type) { - case *types.Const: - if new, ok := new.(*types.Const); ok { - d.constChanges(old, new) - return - } - case *types.Var: - if new, ok := new.(*types.Var); ok { - d.checkCorrespondence(old, "", old.Type(), new.Type()) - return - } - case *types.Func: - switch new := new.(type) { - case *types.Func: - d.checkCorrespondence(old, "", old.Type(), new.Type()) - return - case *types.Var: - d.compatible(old, "", "changed from func to var") - d.checkCorrespondence(old, "", old.Type(), new.Type()) - return - - } - case *types.TypeName: - if new, ok := new.(*types.TypeName); ok { - d.checkCorrespondence(old, "", old.Type(), new.Type()) - return - } - default: - panic("unexpected obj type") - } - // Here if kind of type changed. - d.incompatible(old, "", "changed from %s to %s", - objectKindString(old), objectKindString(new)) -} - -// Compare two constants. -func (d *differ) constChanges(old, new *types.Const) { - ot := old.Type() - nt := new.Type() - // Check for change of type. - if !d.correspond(ot, nt) { - d.typeChanged(old, "", ot, nt) - return - } - // Check for change of value. - // We know the types are the same, so constant.Compare shouldn't panic. - if !constant.Compare(old.Val(), token.EQL, new.Val()) { - d.incompatible(old, "", "value changed from %s to %s", old.Val(), new.Val()) - } -} - -func objectKindString(obj types.Object) string { - switch obj.(type) { - case *types.Const: - return "const" - case *types.Var: - return "var" - case *types.Func: - return "func" - case *types.TypeName: - return "type" - default: - return "???" - } -} - -func (d *differ) checkCorrespondence(obj types.Object, part string, old, new types.Type) { - if !d.correspond(old, new) { - d.typeChanged(obj, part, old, new) - } -} - -func (d *differ) typeChanged(obj types.Object, part string, old, new types.Type) { - old = removeNamesFromSignature(old) - new = removeNamesFromSignature(new) - olds := types.TypeString(old, types.RelativeTo(d.old)) - news := types.TypeString(new, types.RelativeTo(d.new)) - d.incompatible(obj, part, "changed from %s to %s", olds, news) -} - -// go/types always includes the argument and result names when formatting a signature. -// Since these can change without affecting compatibility, we don't want users to -// be distracted by them, so we remove them. -func removeNamesFromSignature(t types.Type) types.Type { - t = types.Unalias(t) - sig, ok := t.(*types.Signature) - if !ok { - return t - } - - dename := func(p *types.Tuple) *types.Tuple { - var vars []*types.Var - for i := 0; i < p.Len(); i++ { - v := p.At(i) - vars = append(vars, types.NewVar(v.Pos(), v.Pkg(), "", types.Unalias(v.Type()))) - } - return types.NewTuple(vars...) - } - - return types.NewSignature(sig.Recv(), dename(sig.Params()), dename(sig.Results()), sig.Variadic()) -} diff --git a/internal/apidiff/apidiff_test.go b/internal/apidiff/apidiff_test.go deleted file mode 100644 index 2c8479667b4..00000000000 --- a/internal/apidiff/apidiff_test.go +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package apidiff - -import ( - "bufio" - "fmt" - "go/types" - "os" - "path/filepath" - "sort" - "strings" - "testing" - - "github.com/google/go-cmp/cmp" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/internal/testenv" -) - -func TestChanges(t *testing.T) { - dir, err := os.MkdirTemp("", "apidiff_test") - if err != nil { - t.Fatal(err) - } - dir = filepath.Join(dir, "go") - wanti, wantc := splitIntoPackages(t, dir) - defer os.RemoveAll(dir) - sort.Strings(wanti) - sort.Strings(wantc) - - oldpkg, err := load(t, "apidiff/old", dir) - if err != nil { - t.Fatal(err) - } - newpkg, err := load(t, "apidiff/new", dir) - if err != nil { - t.Fatal(err) - } - - report := Changes(oldpkg.Types, newpkg.Types) - - got := report.messages(false) - if diff := cmp.Diff(wanti, got); diff != "" { - t.Errorf("incompatibles (-want +got):\n%s", diff) - } - got = report.messages(true) - if diff := cmp.Diff(wantc, got); diff != "" { - t.Errorf("compatibles (-want +got):\n%s", diff) - } -} - -func splitIntoPackages(t *testing.T, dir string) (incompatibles, compatibles []string) { - // Read the input file line by line. - // Write a line into the old or new package, - // dependent on comments. - // Also collect expected messages. - f, err := os.Open("testdata/tests.go") - if err != nil { - t.Fatal(err) - } - defer f.Close() - - if err := os.MkdirAll(filepath.Join(dir, "src", "apidiff"), 0700); err != nil { - t.Fatal(err) - } - if err := os.WriteFile(filepath.Join(dir, "src", "apidiff", "go.mod"), []byte("module apidiff\n"), 0666); err != nil { - t.Fatal(err) - } - - oldd := filepath.Join(dir, "src/apidiff/old") - newd := filepath.Join(dir, "src/apidiff/new") - if err := os.MkdirAll(oldd, 0700); err != nil { - t.Fatal(err) - } - if err := os.Mkdir(newd, 0700); err != nil && !os.IsExist(err) { - t.Fatal(err) - } - - oldf, err := os.Create(filepath.Join(oldd, "old.go")) - if err != nil { - t.Fatal(err) - } - newf, err := os.Create(filepath.Join(newd, "new.go")) - if err != nil { - t.Fatal(err) - } - - wl := func(f *os.File, line string) { - if _, err := fmt.Fprintln(f, line); err != nil { - t.Fatal(err) - } - } - writeBoth := func(line string) { wl(oldf, line); wl(newf, line) } - writeln := writeBoth - s := bufio.NewScanner(f) - for s.Scan() { - line := s.Text() - tl := strings.TrimSpace(line) - switch { - case tl == "// old": - writeln = func(line string) { wl(oldf, line) } - case tl == "// new": - writeln = func(line string) { wl(newf, line) } - case tl == "// both": - writeln = writeBoth - case strings.HasPrefix(tl, "// i "): - incompatibles = append(incompatibles, strings.TrimSpace(tl[4:])) - case strings.HasPrefix(tl, "// c "): - compatibles = append(compatibles, strings.TrimSpace(tl[4:])) - default: - writeln(line) - } - } - if s.Err() != nil { - t.Fatal(s.Err()) - } - return -} - -func load(t *testing.T, importPath, goPath string) (*packages.Package, error) { - testenv.NeedsGoPackages(t) - - cfg := &packages.Config{ - Mode: packages.LoadTypes, - } - if goPath != "" { - cfg.Env = append(os.Environ(), "GOPATH="+goPath) - cfg.Dir = filepath.Join(goPath, "src", filepath.FromSlash(importPath)) - } - pkgs, err := packages.Load(cfg, importPath) - if err != nil { - return nil, err - } - if len(pkgs[0].Errors) > 0 { - return nil, pkgs[0].Errors[0] - } - return pkgs[0], nil -} - -func TestExportedFields(t *testing.T) { - pkg, err := load(t, "golang.org/x/tools/internal/apidiff/testdata/exported_fields", "") - if err != nil { - t.Fatal(err) - } - typeof := func(name string) types.Type { - return 
pkg.Types.Scope().Lookup(name).Type() - } - - s := typeof("S") - su := s.(*types.Named).Underlying().(*types.Struct) - - ef := exportedSelectableFields(su) - wants := []struct { - name string - typ types.Type - }{ - {"A1", typeof("A1")}, - {"D", types.Typ[types.Bool]}, - {"E", types.Typ[types.Int]}, - {"F", typeof("F")}, - {"S", types.NewPointer(s)}, - } - - if got, want := len(ef), len(wants); got != want { - t.Errorf("got %d fields, want %d\n%+v", got, want, ef) - } - for _, w := range wants { - if got := ef[w.name]; got != nil && !types.Identical(got.Type(), w.typ) { - t.Errorf("%s: got %v, want %v", w.name, got.Type(), w.typ) - } - } -} diff --git a/internal/apidiff/compatibility.go b/internal/apidiff/compatibility.go deleted file mode 100644 index f8e59d611bd..00000000000 --- a/internal/apidiff/compatibility.go +++ /dev/null @@ -1,355 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package apidiff - -import ( - "fmt" - "go/types" - "reflect" - - "golang.org/x/tools/internal/typesinternal" -) - -func (d *differ) checkCompatible(otn *types.TypeName, old, new types.Type) { - old = types.Unalias(old) - new = types.Unalias(new) - switch old := old.(type) { - case *types.Interface: - if new, ok := new.(*types.Interface); ok { - d.checkCompatibleInterface(otn, old, new) - return - } - - case *types.Struct: - if new, ok := new.(*types.Struct); ok { - d.checkCompatibleStruct(otn, old, new) - return - } - - case *types.Chan: - if new, ok := new.(*types.Chan); ok { - d.checkCompatibleChan(otn, old, new) - return - } - - case *types.Basic: - if new, ok := new.(*types.Basic); ok { - d.checkCompatibleBasic(otn, old, new) - return - } - - case *types.Named: - panic("unreachable") - - default: - d.checkCorrespondence(otn, "", old, new) - return - - } - // Here if old and new are different kinds of types. - d.typeChanged(otn, "", old, new) -} - -func (d *differ) checkCompatibleChan(otn *types.TypeName, old, new *types.Chan) { - d.checkCorrespondence(otn, ", element type", old.Elem(), new.Elem()) - if old.Dir() != new.Dir() { - if new.Dir() == types.SendRecv { - d.compatible(otn, "", "removed direction") - } else { - d.incompatible(otn, "", "changed direction") - } - } -} - -func (d *differ) checkCompatibleBasic(otn *types.TypeName, old, new *types.Basic) { - // Certain changes to numeric types are compatible. Approximately, the info must - // be the same, and the new values must be a superset of the old. - if old.Kind() == new.Kind() { - // old and new are identical - return - } - if compatibleBasics[[2]types.BasicKind{old.Kind(), new.Kind()}] { - d.compatible(otn, "", "changed from %s to %s", old, new) - } else { - d.typeChanged(otn, "", old, new) - } -} - -// All pairs (old, new) of compatible basic types. 
-var compatibleBasics = map[[2]types.BasicKind]bool{ - {types.Uint8, types.Uint16}: true, - {types.Uint8, types.Uint32}: true, - {types.Uint8, types.Uint}: true, - {types.Uint8, types.Uint64}: true, - {types.Uint16, types.Uint32}: true, - {types.Uint16, types.Uint}: true, - {types.Uint16, types.Uint64}: true, - {types.Uint32, types.Uint}: true, - {types.Uint32, types.Uint64}: true, - {types.Uint, types.Uint64}: true, - {types.Int8, types.Int16}: true, - {types.Int8, types.Int32}: true, - {types.Int8, types.Int}: true, - {types.Int8, types.Int64}: true, - {types.Int16, types.Int32}: true, - {types.Int16, types.Int}: true, - {types.Int16, types.Int64}: true, - {types.Int32, types.Int}: true, - {types.Int32, types.Int64}: true, - {types.Int, types.Int64}: true, - {types.Float32, types.Float64}: true, - {types.Complex64, types.Complex128}: true, -} - -// Interface compatibility: -// If the old interface has an unexported method, the new interface is compatible -// if its exported method set is a superset of the old. (Users could not implement, -// only embed.) -// -// If the old interface did not have an unexported method, the new interface is -// compatible if its exported method set is the same as the old, and it has no -// unexported methods. (Adding an unexported method makes the interface -// unimplementable outside the package.) -// -// TODO: must also check that if any methods were added or removed, every exposed -// type in the package that implemented the interface in old still implements it in -// new. Otherwise external assignments could fail. -func (d *differ) checkCompatibleInterface(otn *types.TypeName, old, new *types.Interface) { - // Method sets are checked in checkCompatibleDefined. - - // Does the old interface have an unexported method? - if unexportedMethod(old) != nil { - d.checkMethodSet(otn, old, new, additionsCompatible) - } else { - // Perform an equivalence check, but with more information. - d.checkMethodSet(otn, old, new, additionsIncompatible) - if u := unexportedMethod(new); u != nil { - d.incompatible(otn, u.Name(), "added unexported method") - } - } -} - -// Return an unexported method from the method set of t, or nil if there are none. -func unexportedMethod(t *types.Interface) *types.Func { - for i := 0; i < t.NumMethods(); i++ { - if m := t.Method(i); !m.Exported() { - return m - } - } - return nil -} - -// We need to check three things for structs: -// 1. The set of exported fields must be compatible. This ensures that keyed struct -// literals continue to compile. (There is no compatibility guarantee for unkeyed -// struct literals.) -// 2. The set of exported *selectable* fields must be compatible. This includes the exported -// fields of all embedded structs. This ensures that selections continue to compile. -// 3. If the old struct is comparable, so must the new one be. This ensures that equality -// expressions and uses of struct values as map keys continue to compile. -// -// An unexported embedded struct can't appear in a struct literal outside the -// package, so it doesn't have to be present, or have the same name, in the new -// struct. -// -// Field tags are ignored: they have no compile-time implications. -func (d *differ) checkCompatibleStruct(obj types.Object, old, new *types.Struct) { - d.checkCompatibleObjectSets(obj, exportedFields(old), exportedFields(new)) - d.checkCompatibleObjectSets(obj, exportedSelectableFields(old), exportedSelectableFields(new)) - // Removing comparability from a struct is an incompatible change. 
- if types.Comparable(old) && !types.Comparable(new) { - d.incompatible(obj, "", "old is comparable, new is not") - } -} - -// exportedFields collects all the immediate fields of the struct that are exported. -// This is also the set of exported keys for keyed struct literals. -func exportedFields(s *types.Struct) map[string]types.Object { - m := map[string]types.Object{} - for i := 0; i < s.NumFields(); i++ { - f := s.Field(i) - if f.Exported() { - m[f.Name()] = f - } - } - return m -} - -// exportedSelectableFields collects all the exported fields of the struct, including -// exported fields of embedded structs. -// -// We traverse the struct breadth-first, because of the rule that a lower-depth field -// shadows one at a higher depth. -func exportedSelectableFields(s *types.Struct) map[string]types.Object { - var ( - m = map[string]types.Object{} - next []*types.Struct // embedded structs at the next depth - seen []*types.Struct // to handle recursive embedding - ) - for cur := []*types.Struct{s}; len(cur) > 0; cur, next = next, nil { - seen = append(seen, cur...) - // We only want to consider unambiguous fields. Ambiguous fields (where there - // is more than one field of the same name at the same level) are legal, but - // cannot be selected. - for name, f := range unambiguousFields(cur) { - // Record an exported field we haven't seen before. If we have seen it, - // it occurred a lower depth, so it shadows this field. - if f.Exported() && m[name] == nil { - m[name] = f - } - // Remember embedded structs for processing at the next depth, - // but only if we haven't seen the struct at this depth or above. - if !f.Anonymous() { - continue - } - t := f.Type().Underlying() - if p, ok := t.(*types.Pointer); ok { - t = p.Elem().Underlying() - } - if t, ok := t.(*types.Struct); ok && !contains(seen, t) { - next = append(next, t) - } - } - } - return m -} - -func contains(ts []*types.Struct, t *types.Struct) bool { - for _, s := range ts { - if types.Identical(s, t) { - return true - } - } - return false -} - -// Given a set of structs at the same depth, the unambiguous fields are the ones whose -// names appear exactly once. -func unambiguousFields(structs []*types.Struct) map[string]*types.Var { - fields := map[string]*types.Var{} - seen := map[string]bool{} - for _, s := range structs { - for i := 0; i < s.NumFields(); i++ { - f := s.Field(i) - name := f.Name() - if seen[name] { - delete(fields, name) - } else { - seen[name] = true - fields[name] = f - } - } - } - return fields -} - -// Anything removed or change from the old set is an incompatible change. -// Anything added to the new set is a compatible change. -func (d *differ) checkCompatibleObjectSets(obj types.Object, old, new map[string]types.Object) { - for name, oldo := range old { - newo := new[name] - if newo == nil { - d.incompatible(obj, name, "removed") - } else { - d.checkCorrespondence(obj, name, oldo.Type(), newo.Type()) - } - } - for name := range new { - if old[name] == nil { - d.compatible(obj, name, "added") - } - } -} - -func (d *differ) checkCompatibleDefined(otn *types.TypeName, old *types.Named, new types.Type) { - // We've already checked that old and new correspond. - d.checkCompatible(otn, old.Underlying(), new.Underlying()) - // If there are different kinds of types (e.g. struct and interface), don't bother checking - // the method sets. - if reflect.TypeOf(old.Underlying()) != reflect.TypeOf(new.Underlying()) { - return - } - // Interface method sets are checked in checkCompatibleInterface. 
- if types.IsInterface(old) { - return - } - - // A new method set is compatible with an old if the new exported methods are a superset of the old. - d.checkMethodSet(otn, old, new, additionsCompatible) - d.checkMethodSet(otn, types.NewPointer(old), types.NewPointer(new), additionsCompatible) -} - -const ( - additionsCompatible = true - additionsIncompatible = false -) - -func (d *differ) checkMethodSet(otn *types.TypeName, oldt, newt types.Type, addcompat bool) { - // TODO: find a way to use checkCompatibleObjectSets for this. - oldMethodSet := exportedMethods(oldt) - newMethodSet := exportedMethods(newt) - msname := otn.Name() - if _, ok := types.Unalias(oldt).(*types.Pointer); ok { - msname = "*" + msname - } - for name, oldMethod := range oldMethodSet { - newMethod := newMethodSet[name] - if newMethod == nil { - var part string - // Due to embedding, it's possible that the method's receiver type is not - // the same as the defined type whose method set we're looking at. So for - // a type T with removed method M that is embedded in some other type U, - // we will generate two "removed" messages for T.M, one for its own type - // T and one for the embedded type U. We want both messages to appear, - // but the messageSet dedup logic will allow only one message for a given - // object. So use the part string to distinguish them. - recv := oldMethod.Type().(*types.Signature).Recv() - if _, named := typesinternal.ReceiverNamed(recv); named.Obj() != otn { - part = fmt.Sprintf(", method set of %s", msname) - } - d.incompatible(oldMethod, part, "removed") - } else { - obj := oldMethod - // If a value method is changed to a pointer method and has a signature - // change, then we can get two messages for the same method definition: one - // for the value method set that says it's removed, and another for the - // pointer method set that says it changed. To keep both messages (since - // messageSet dedups), use newMethod for the second. (Slight hack.) - if !hasPointerReceiver(oldMethod) && hasPointerReceiver(newMethod) { - obj = newMethod - } - d.checkCorrespondence(obj, "", oldMethod.Type(), newMethod.Type()) - } - } - - // Check for added methods. - for name, newMethod := range newMethodSet { - if oldMethodSet[name] == nil { - if addcompat { - d.compatible(newMethod, "", "added") - } else { - d.incompatible(newMethod, "", "added") - } - } - } -} - -// exportedMethods collects all the exported methods of type's method set. -func exportedMethods(t types.Type) map[string]*types.Func { - m := make(map[string]*types.Func) - ms := types.NewMethodSet(t) - for i := 0; i < ms.Len(); i++ { - obj := ms.At(i).Obj().(*types.Func) - if obj.Exported() { - m[obj.Name()] = obj - } - } - return m -} - -func hasPointerReceiver(method *types.Func) bool { - isptr, _ := typesinternal.ReceiverNamed(method.Type().(*types.Signature).Recv()) - return isptr -} diff --git a/internal/apidiff/correspondence.go b/internal/apidiff/correspondence.go deleted file mode 100644 index a626e066430..00000000000 --- a/internal/apidiff/correspondence.go +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package apidiff - -import ( - "go/types" - "sort" -) - -// Two types are correspond if they are identical except for defined types, -// which must correspond. -// -// Two defined types correspond if they can be interchanged in the old and new APIs, -// possibly after a renaming. 
-// -// This is not a pure function. If we come across named types while traversing, -// we establish correspondence. -func (d *differ) correspond(old, new types.Type) bool { - return d.corr(old, new, nil) -} - -// corr determines whether old and new correspond. The argument p is a list of -// known interface identities, to avoid infinite recursion. -// -// corr calls itself recursively as much as possible, to establish more -// correspondences and so check more of the API. E.g. if the new function has more -// parameters than the old, compare all the old ones before returning false. -// -// Compare this to the implementation of go/types.Identical. -func (d *differ) corr(old, new types.Type, p *ifacePair) bool { - // Structure copied from types.Identical. - old = types.Unalias(old) - new = types.Unalias(new) - switch old := old.(type) { - case *types.Basic: - return types.Identical(old, new) - - case *types.Array: - if new, ok := new.(*types.Array); ok { - return d.corr(old.Elem(), new.Elem(), p) && old.Len() == new.Len() - } - - case *types.Slice: - if new, ok := new.(*types.Slice); ok { - return d.corr(old.Elem(), new.Elem(), p) - } - - case *types.Map: - if new, ok := new.(*types.Map); ok { - return d.corr(old.Key(), new.Key(), p) && d.corr(old.Elem(), new.Elem(), p) - } - - case *types.Chan: - if new, ok := new.(*types.Chan); ok { - return d.corr(old.Elem(), new.Elem(), p) && old.Dir() == new.Dir() - } - - case *types.Pointer: - if new, ok := new.(*types.Pointer); ok { - return d.corr(old.Elem(), new.Elem(), p) - } - - case *types.Signature: - if new, ok := new.(*types.Signature); ok { - pe := d.corr(old.Params(), new.Params(), p) - re := d.corr(old.Results(), new.Results(), p) - return old.Variadic() == new.Variadic() && pe && re - } - - case *types.Tuple: - if new, ok := new.(*types.Tuple); ok { - for i := 0; i < old.Len(); i++ { - if i >= new.Len() || !d.corr(old.At(i).Type(), new.At(i).Type(), p) { - return false - } - } - return old.Len() == new.Len() - } - - case *types.Struct: - if new, ok := new.(*types.Struct); ok { - for i := 0; i < old.NumFields(); i++ { - if i >= new.NumFields() { - return false - } - of := old.Field(i) - nf := new.Field(i) - if of.Anonymous() != nf.Anonymous() || - old.Tag(i) != new.Tag(i) || - !d.corr(of.Type(), nf.Type(), p) || - !d.corrFieldNames(of, nf) { - return false - } - } - return old.NumFields() == new.NumFields() - } - - case *types.Interface: - if new, ok := new.(*types.Interface); ok { - // Deal with circularity. See the comment in types.Identical. - q := &ifacePair{old, new, p} - for p != nil { - if p.identical(q) { - return true // same pair was compared before - } - p = p.prev - } - oldms := d.sortedMethods(old) - newms := d.sortedMethods(new) - for i, om := range oldms { - if i >= len(newms) { - return false - } - nm := newms[i] - if d.methodID(om) != d.methodID(nm) || !d.corr(om.Type(), nm.Type(), q) { - return false - } - } - return old.NumMethods() == new.NumMethods() - } - - case *types.Named: - if new, ok := new.(*types.Named); ok { - return d.establishCorrespondence(old, new) - } - if new, ok := new.(*types.Basic); ok { - // Basic types are defined types, too, so we have to support them. - - return d.establishCorrespondence(old, new) - } - - default: - panic("unknown type kind") - } - return false -} - -// Compare old and new field names. We are determining correspondence across packages, -// so just compare names, not packages. 
For an unexported, embedded field of named -// type (non-named embedded fields are possible with aliases), we check that the type -// names correspond. We check the types for correspondence before this is called, so -// we've established correspondence. -func (d *differ) corrFieldNames(of, nf *types.Var) bool { - if of.Anonymous() && nf.Anonymous() && !of.Exported() && !nf.Exported() { - if on, ok := of.Type().(*types.Named); ok { - nn := nf.Type().(*types.Named) - return d.establishCorrespondence(on, nn) - } - } - return of.Name() == nf.Name() -} - -// Establish that old corresponds with new if it does not already -// correspond to something else. -func (d *differ) establishCorrespondence(old *types.Named, new types.Type) bool { - oldname := old.Obj() - oldc := d.correspondMap[oldname] - if oldc == nil { - // For now, assume the types don't correspond unless they are from the old - // and new packages, respectively. - // - // This is too conservative. For instance, - // [old] type A = q.B; [new] type A q.C - // could be OK if in package q, B is an alias for C. - // Or, using p as the name of the current old/new packages: - // [old] type A = q.B; [new] type A int - // could be OK if in q, - // [old] type B int; [new] type B = p.A - // In this case, p.A and q.B name the same type in both old and new worlds. - // Note that this case doesn't imply circular package imports: it's possible - // that in the old world, p imports q, but in the new, q imports p. - // - // However, if we didn't do something here, then we'd incorrectly allow cases - // like the first one above in which q.B is not an alias for q.C - // - // What we should do is check that the old type, in the new world's package - // of the same path, doesn't correspond to something other than the new type. - // That is a bit hard, because there is no easy way to find a new package - // matching an old one. - if newn, ok := new.(*types.Named); ok { - if old.Obj().Pkg() != d.old || newn.Obj().Pkg() != d.new { - return old.Obj().Id() == newn.Obj().Id() - } - } - // If there is no correspondence, create one. - d.correspondMap[oldname] = new - // Check that the corresponding types are compatible. - d.checkCompatibleDefined(oldname, old, new) - return true - } - return types.Identical(oldc, new) -} - -func (d *differ) sortedMethods(iface *types.Interface) []*types.Func { - ms := make([]*types.Func, iface.NumMethods()) - for i := 0; i < iface.NumMethods(); i++ { - ms[i] = iface.Method(i) - } - sort.Slice(ms, func(i, j int) bool { return d.methodID(ms[i]) < d.methodID(ms[j]) }) - return ms -} - -func (d *differ) methodID(m *types.Func) string { - // If the method belongs to one of the two packages being compared, use - // just its name even if it's unexported. That lets us treat unexported names - // from the old and new packages as equal. - if m.Pkg() == d.old || m.Pkg() == d.new { - return m.Name() - } - return m.Id() -} - -// Copied from the go/types package: - -// An ifacePair is a node in a stack of interface type pairs compared for identity. -type ifacePair struct { - x, y *types.Interface - prev *ifacePair -} - -func (p *ifacePair) identical(q *ifacePair) bool { - return p.x == q.x && p.y == q.y || p.x == q.y && p.y == q.x -} diff --git a/internal/apidiff/messageset.go b/internal/apidiff/messageset.go deleted file mode 100644 index 895e5f878a4..00000000000 --- a/internal/apidiff/messageset.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// TODO: show that two-non-empty dotjoin can happen, by using an anon struct as a field type -// TODO: don't report removed/changed methods for both value and pointer method sets? - -package apidiff - -import ( - "fmt" - "go/types" - "sort" - "strings" -) - -// There can be at most one message for each object or part thereof. -// Parts include interface methods and struct fields. -// -// The part thing is necessary. Method (Func) objects have sufficient info, but field -// Vars do not: they just have a field name and a type, without the enclosing struct. -type messageSet map[types.Object]map[string]string - -// Add a message for obj and part, overwriting a previous message -// (shouldn't happen). -// obj is required but part can be empty. -func (m messageSet) add(obj types.Object, part, msg string) { - s := m[obj] - if s == nil { - s = map[string]string{} - m[obj] = s - } - if f, ok := s[part]; ok && f != msg { - fmt.Printf("! second, different message for obj %s, part %q\n", obj, part) - fmt.Printf(" first: %s\n", f) - fmt.Printf(" second: %s\n", msg) - } - s[part] = msg -} - -func (m messageSet) collect() []string { - var s []string - for obj, parts := range m { - // Format each object name relative to its own package. - objstring := objectString(obj) - for part, msg := range parts { - var p string - - if strings.HasPrefix(part, ",") { - p = objstring + part - } else { - p = dotjoin(objstring, part) - } - s = append(s, p+": "+msg) - } - } - sort.Strings(s) - return s -} - -func objectString(obj types.Object) string { - if f, ok := obj.(*types.Func); ok { - sig := f.Type().(*types.Signature) - if recv := sig.Recv(); recv != nil { - tn := types.TypeString(recv.Type(), types.RelativeTo(obj.Pkg())) - if tn[0] == '*' { - tn = "(" + tn + ")" - } - return fmt.Sprintf("%s.%s", tn, obj.Name()) - } - } - return obj.Name() -} - -func dotjoin(s1, s2 string) string { - if s1 == "" { - return s2 - } - if s2 == "" { - return s1 - } - return s1 + "." + s2 -} diff --git a/internal/apidiff/report.go b/internal/apidiff/report.go deleted file mode 100644 index c3f08a9d396..00000000000 --- a/internal/apidiff/report.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package apidiff - -import ( - "bytes" - "fmt" - "io" -) - -// Report describes the changes detected by Changes. -type Report struct { - Changes []Change -} - -// A Change describes a single API change. 
-type Change struct { - Message string - Compatible bool -} - -func (r Report) messages(compatible bool) []string { - var msgs []string - for _, c := range r.Changes { - if c.Compatible == compatible { - msgs = append(msgs, c.Message) - } - } - return msgs -} - -func (r Report) String() string { - var buf bytes.Buffer - if err := r.Text(&buf); err != nil { - return fmt.Sprintf("!!%v", err) - } - return buf.String() -} - -func (r Report) Text(w io.Writer) error { - if err := r.TextIncompatible(w, true); err != nil { - return err - } - return r.TextCompatible(w) -} - -func (r Report) TextIncompatible(w io.Writer, withHeader bool) error { - if withHeader { - return r.writeMessages(w, "Incompatible changes:", r.messages(false)) - } - return r.writeMessages(w, "", r.messages(false)) -} - -func (r Report) TextCompatible(w io.Writer) error { - return r.writeMessages(w, "Compatible changes:", r.messages(true)) -} - -func (r Report) writeMessages(w io.Writer, header string, msgs []string) error { - if len(msgs) == 0 { - return nil - } - if header != "" { - if _, err := fmt.Fprintf(w, "%s\n", header); err != nil { - return err - } - } - for _, m := range msgs { - if _, err := fmt.Fprintf(w, "- %s\n", m); err != nil { - return err - } - } - return nil -} diff --git a/internal/apidiff/testdata/exported_fields/ef.go b/internal/apidiff/testdata/exported_fields/ef.go deleted file mode 100644 index 19da716c46d..00000000000 --- a/internal/apidiff/testdata/exported_fields/ef.go +++ /dev/null @@ -1,37 +0,0 @@ -package exported_fields - -// Used for testing exportedFields. -// Its exported fields are: -// A1 [1]int -// D bool -// E int -// F F -// S *S -type ( - S struct { - int - *embed2 - embed - E int // shadows embed.E - alias - A1 - *S - } - - A1 [1]int - - embed struct { - E string - } - - embed2 struct { - embed3 - F // shadows embed3.F - } - embed3 struct { - F bool - } - alias = struct{ D bool } - - F int -) diff --git a/internal/apidiff/testdata/tests.go b/internal/apidiff/testdata/tests.go deleted file mode 100644 index 567e6077758..00000000000 --- a/internal/apidiff/testdata/tests.go +++ /dev/null @@ -1,924 +0,0 @@ -// This file is split into two packages, old and new. -// It is syntactically valid Go so that gofmt can process it. -// -// If a comment begins with: Then: -// old write subsequent lines to the "old" package -// new write subsequent lines to the "new" package -// both write subsequent lines to both packages -// c expect a compatible error with the following text -// i expect an incompatible error with the following text -package ignore - -// both -import "io" - -//////////////// Basics - -//// Same type in both: OK. -// both -type A int - -//// Changing the type is an incompatible change. -// old -type B int - -// new -// i B: changed from int to string -type B string - -//// Adding a new type, whether alias or not, is a compatible change. -// new -// c AA: added -type AA = A - -// c B1: added -type B1 bool - -//// Change of type for an unexported name doesn't matter... -// old -type t int - -// new -type t string // OK: t isn't part of the API - -//// ...unless it is exposed. -// both -var V2 u - -// old -type u string - -// new -// i u: changed from string to int -type u int - -//// An exposed, unexported type can be renamed. -// both -type u2 int - -// old -type u1 int - -var V5 u1 - -// new -var V5 u2 // OK: V5 has changed type, but old u1 corresponds to new u2 - -//// Splitting a single type into two is an incompatible change. 
-// both -type u3 int - -// old -type ( - Split1 = u1 - Split2 = u1 -) - -// new -type ( - Split1 = u2 // OK, since old u1 corresponds to new u2 - - // This tries to make u1 correspond to u3 - // i Split2: changed from u1 to u3 - Split2 = u3 -) - -//// Merging two types into one is OK. -// old -type ( - GoodMerge1 = u2 - GoodMerge2 = u3 -) - -// new -type ( - GoodMerge1 = u3 - GoodMerge2 = u3 -) - -//// Merging isn't OK here because a method is lost. -// both -type u4 int - -func (u4) M() {} - -// old -type ( - BadMerge1 = u3 - BadMerge2 = u4 -) - -// new -type ( - BadMerge1 = u3 - // i u4.M: removed - // What's really happening here is that old u4 corresponds to new u3, - // and new u3's method set is not a superset of old u4's. - BadMerge2 = u3 -) - -// old -type Rem int - -// new -// i Rem: removed - -//////////////// Constants - -//// type changes -// old -const ( - C1 = 1 - C2 int = 2 - C3 = 3 - C4 u1 = 4 -) - -var V8 int - -// new -const ( - // i C1: changed from untyped int to untyped string - C1 = "1" - // i C2: changed from int to untyped int - C2 = -1 - // i C3: changed from untyped int to int - C3 int = 3 - // i V8: changed from var to const - V8 int = 1 - C4 u2 = 4 // OK: u1 corresponds to u2 -) - -// value change -// old -const ( - Cr1 = 1 - Cr2 = "2" - Cr3 = 3.5 - Cr4 = complex(0, 4.1) -) - -// new -const ( - // i Cr1: value changed from 1 to -1 - Cr1 = -1 - // i Cr2: value changed from "2" to "3" - Cr2 = "3" - // i Cr3: value changed from 3.5 to 3.8 - Cr3 = 3.8 - // i Cr4: value changed from (0 + 4.1i) to (4.1 + 0i) - Cr4 = complex(4.1, 0) -) - -//////////////// Variables - -//// simple type changes -// old -var ( - V1 string - V3 A - V7 <-chan int -) - -// new -var ( - // i V1: changed from string to []string - V1 []string - V3 A // OK: same - // i V7: changed from <-chan int to chan int - V7 chan int -) - -//// interface type changes -// old -var ( - V9 interface{ M() } - V10 interface{ M() } - V11 interface{ M() } -) - -// new -var ( - // i V9: changed from interface{M()} to interface{} - V9 interface{} - // i V10: changed from interface{M()} to interface{M(); M2()} - V10 interface { - M2() - M() - } - // i V11: changed from interface{M()} to interface{M(int)} - V11 interface{ M(int) } -) - -//// struct type changes -// old -var ( - VS1 struct{ A, B int } - VS2 struct{ A, B int } - VS3 struct{ A, B int } - VS4 struct { - A int - u1 - } -) - -// new -var ( - // i VS1: changed from struct{A int; B int} to struct{B int; A int} - VS1 struct{ B, A int } - // i VS2: changed from struct{A int; B int} to struct{A int} - VS2 struct{ A int } - // i VS3: changed from struct{A int; B int} to struct{A int; B int; C int} - VS3 struct{ A, B, C int } - VS4 struct { - A int - u2 - } -) - -//////////////// Types - -// old -const C5 = 3 - -type ( - A1 [1]int - A2 [2]int - A3 [C5]int -) - -// new -// i C5: value changed from 3 to 4 -const C5 = 4 - -type ( - A1 [1]int - // i A2: changed from [2]int to [2]bool - A2 [2]bool - // i A3: changed from [3]int to [4]int - A3 [C5]int -) - -// old -type ( - Sl []int - P1 *int - P2 *u1 -) - -// new -type ( - // i Sl: changed from []int to []string - Sl []string - // i P1: changed from *int to **bool - P1 **bool - P2 *u2 // OK: u1 corresponds to u2 -) - -// old -type Bc1 int32 -type Bc2 uint -type Bc3 float32 -type Bc4 complex64 - -// new -// c Bc1: changed from int32 to int -type Bc1 int - -// c Bc2: changed from uint to uint64 -type Bc2 uint64 - -// c Bc3: changed from float32 to float64 -type Bc3 float64 - -// c Bc4: changed from complex64 to 
complex128 -type Bc4 complex128 - -// old -type Bi1 int32 -type Bi2 uint -type Bi3 float64 -type Bi4 complex128 - -// new -// i Bi1: changed from int32 to int16 -type Bi1 int16 - -// i Bi2: changed from uint to uint32 -type Bi2 uint32 - -// i Bi3: changed from float64 to float32 -type Bi3 float32 - -// i Bi4: changed from complex128 to complex64 -type Bi4 complex64 - -// old -type ( - M1 map[string]int - M2 map[string]int - M3 map[string]int -) - -// new -type ( - M1 map[string]int - // i M2: changed from map[string]int to map[int]int - M2 map[int]int - // i M3: changed from map[string]int to map[string]string - M3 map[string]string -) - -// old -type ( - Ch1 chan int - Ch2 <-chan int - Ch3 chan int - Ch4 <-chan int -) - -// new -type ( - // i Ch1, element type: changed from int to bool - Ch1 chan bool - // i Ch2: changed direction - Ch2 chan<- int - // i Ch3: changed direction - Ch3 <-chan int - // c Ch4: removed direction - Ch4 chan int -) - -// old -type I1 interface { - M1() - M2() -} - -// new -type I1 interface { - // M1() - // i I1.M1: removed - M2(int) - // i I1.M2: changed from func() to func(int) - M3() - // i I1.M3: added - m() - // i I1.m: added unexported method -} - -// old -type I2 interface { - M1() - m() -} - -// new -type I2 interface { - M1() - // m() Removing an unexported method is OK. - m2() // OK, because old already had an unexported method - // c I2.M2: added - M2() -} - -// old -type I3 interface { - io.Reader - M() -} - -// new -// OK: what matters is the method set; the name of the embedded -// interface isn't important. -type I3 interface { - M() - Read([]byte) (int, error) -} - -// old -type I4 io.Writer - -// new -// OK: in both, I4 is a distinct type from io.Writer, and -// the old and new I4s have the same method set. -type I4 interface { - Write([]byte) (int, error) -} - -// old -type I5 = io.Writer - -// new -// i I5: changed from io.Writer to I5 -// In old, I5 and io.Writer are the same type; in new, -// they are different. That can break something like: -// var _ func(io.Writer) = func(pkg.I6) {} -type I5 io.Writer - -// old -type I6 interface{ Write([]byte) (int, error) } - -// new -// i I6: changed from I6 to io.Writer -// Similar to the above. -type I6 = io.Writer - -//// correspondence with a basic type -// Basic types are technically defined types, but they aren't -// represented that way in go/types, so the cases below are special. - -// both -type T1 int - -// old -var VT1 T1 - -// new -// i VT1: changed from T1 to int -// This fails because old T1 corresponds to both int and new T1. -var VT1 int - -// old -type t2 int - -var VT2 t2 - -// new -// OK: t2 corresponds to int. It's fine that old t2 -// doesn't exist in new. -var VT2 int - -// both -type t3 int - -func (t3) M() {} - -// old -var VT3 t3 - -// new -// i t3.M: removed -// Here the change from t3 to int is incompatible -// because old t3 has an exported method. -var VT3 int - -// old -var VT4 int - -// new -type t4 int - -// i VT4: changed from int to t4 -// This is incompatible because of code like -// VT4 + int(1) -// which works in old but fails in new. -// The difference from the above cases is that -// in those, we were merging two types into one; -// here, we are splitting int into t4 and int. 
-var VT4 t4 - -//////////////// Functions - -// old -func F1(a int, b string) map[u1]A { return nil } -func F2(int) {} -func F3(int) {} -func F4(int) int { return 0 } -func F5(int) int { return 0 } -func F6(int) {} -func F7(interface{}) {} - -// new -func F1(c int, d string) map[u2]AA { return nil } //OK: same (since u1 corresponds to u2) - -// i F2: changed from func(int) to func(int) bool -func F2(int) bool { return true } - -// i F3: changed from func(int) to func(int, int) -func F3(int, int) {} - -// i F4: changed from func(int) int to func(bool) int -func F4(bool) int { return 0 } - -// i F5: changed from func(int) int to func(int) string -func F5(int) string { return "" } - -// i F6: changed from func(int) to func(...int) -func F6(...int) {} - -// i F7: changed from func(interface{}) to func(interface{x()}) -func F7(a interface{ x() }) {} - -// old -func F8(bool) {} - -// new -// c F8: changed from func to var -var F8 func(bool) - -// old -var F9 func(int) - -// new -// i F9: changed from var to func -func F9(int) {} - -// both -// OK, even though new S1 is incompatible with old S1 (see below) -func F10(S1) {} - -//////////////// Structs - -// old -type S1 struct { - A int - B string - C bool - d float32 -} - -// new -type S1 = s1 - -type s1 struct { - C chan int - // i S1.C: changed from bool to chan int - A int - // i S1.B: removed - // i S1: old is comparable, new is not - x []int - d float32 - E bool - // c S1.E: added -} - -// old -type embed struct { - E string -} - -type S2 struct { - A int - embed -} - -// new -type embedx struct { - E string -} - -type S2 struct { - embedx // OK: the unexported embedded field changed names, but the exported field didn't - A int -} - -// both -type F int - -// old -type S3 struct { - A int - embed -} - -// new -type embed struct{ F int } - -type S3 struct { - // i S3.E: removed - embed - // c S3.F: added - A int -} - -// old -type embed2 struct { - embed3 - F // shadows embed3.F -} - -type embed3 struct { - F bool -} - -type alias = struct{ D bool } - -type S4 struct { - int - *embed2 - embed - E int // shadows embed.E - alias - A1 - *S4 -} - -// new -type S4 struct { - // OK: removed unexported fields - // D and F marked as added because they are now part of the immediate fields - D bool - // c S4.D: added - E int // OK: same as in old - F F - // c S4.F: added - A1 // OK: same - *S4 // OK: same (recursive embedding) -} - -//// Difference between exported selectable fields and exported immediate fields. -// both -type S5 struct{ A int } - -// old -// Exported immediate fields: A, S5 -// Exported selectable fields: A int, S5 S5 -type S6 struct { - S5 S5 - A int -} - -// new -// Exported immediate fields: S5 -// Exported selectable fields: A int, S5 S5. - -// i S6.A: removed -type S6 struct { - S5 -} - -//// Ambiguous fields can exist; they just can't be selected. 
-// both -type ( - embed7a struct{ E int } - embed7b struct{ E bool } -) - -// old -type S7 struct { // legal, but no selectable fields - embed7a - embed7b -} - -// new -type S7 struct { - embed7a - embed7b - // c S7.E: added - E string -} - -//////////////// Method sets - -// old -type SM struct { - embedm - Embedm -} - -func (SM) V1() {} -func (SM) V2() {} -func (SM) V3() {} -func (SM) V4() {} -func (SM) v() {} - -func (*SM) P1() {} -func (*SM) P2() {} -func (*SM) P3() {} -func (*SM) P4() {} -func (*SM) p() {} - -type embedm int - -func (embedm) EV1() {} -func (embedm) EV2() {} -func (embedm) EV3() {} -func (*embedm) EP1() {} -func (*embedm) EP2() {} -func (*embedm) EP3() {} - -type Embedm struct { - A int -} - -func (Embedm) FV() {} -func (*Embedm) FP() {} - -type RepeatEmbedm struct { - Embedm -} - -// new -type SM struct { - embedm2 - embedm3 - Embedm - // i SM.A: changed from int to bool -} - -// c SMa: added -type SMa = SM - -func (SM) V1() {} // OK: same - -// func (SM) V2() {} -// i SM.V2: removed - -// i SM.V3: changed from func() to func(int) -func (SM) V3(int) {} - -// c SM.V5: added -func (SM) V5() {} - -func (SM) v(int) {} // OK: unexported method change -func (SM) v2() {} // OK: unexported method added - -func (*SM) P1() {} // OK: same -//func (*SM) P2() {} -// i (*SM).P2: removed - -// i (*SM).P3: changed from func() to func(int) -func (*SMa) P3(int) {} - -// c (*SM).P5: added -func (*SM) P5() {} - -// func (*SM) p() {} // OK: unexported method removed - -// Changing from a value to a pointer receiver or vice versa -// just looks like adding and removing a method. - -// i SM.V4: removed -// i (*SM).V4: changed from func() to func(int) -func (*SM) V4(int) {} - -// c SM.P4: added -// P4 is not removed from (*SM) because value methods -// are in the pointer method set. -func (SM) P4() {} - -type embedm2 int - -// i embedm.EV1: changed from func() to func(int) -func (embedm2) EV1(int) {} - -// i embedm.EV2, method set of SM: removed -// i embedm.EV2, method set of *SM: removed - -// i (*embedm).EP2, method set of *SM: removed -func (*embedm2) EP1() {} - -type embedm3 int - -func (embedm3) EV3() {} // OK: compatible with old embedm.EV3 -func (*embedm3) EP3() {} // OK: compatible with old (*embedm).EP3 - -type Embedm struct { - // i Embedm.A: changed from int to bool - A bool -} - -// i Embedm.FV: changed from func() to func(int) -func (Embedm) FV(int) {} -func (*Embedm) FP() {} - -type RepeatEmbedm struct { - // i RepeatEmbedm.A: changed from int to bool - Embedm -} - -//////////////// Whole-package interface satisfaction - -// old -type WI1 interface { - M1() - m1() -} - -type WI2 interface { - M2() - m2() -} - -type WS1 int - -func (WS1) M1() {} -func (WS1) m1() {} - -type WS2 int - -func (WS2) M2() {} -func (WS2) m2() {} - -// new -type WI1 interface { - M1() - m() -} - -type WS1 int - -func (WS1) M1() {} - -// i WS1: no longer implements WI1 -//func (WS1) m1() {} - -type WI2 interface { - M2() - m2() - // i WS2: no longer implements WI2 - m3() -} - -type WS2 int - -func (WS2) M2() {} -func (WS2) m2() {} - -//////////////// Miscellany - -// This verifies that the code works even through -// multiple levels of unexported typed. 
- -// old -var Z w - -type w []x -type x []z -type z int - -// new -var Z w - -type w []x -type x []z - -// i z: changed from int to bool -type z bool - -// old -type H struct{} - -func (H) M() {} - -// new -// i H: changed from struct{} to interface{M()} -type H interface { - M() -} - -//// Splitting types - -//// OK: in both old and new, {J1, K1, L1} name the same type. -// old -type ( - J1 = K1 - K1 = L1 - L1 int -) - -// new -type ( - J1 = K1 - K1 int - L1 = J1 -) - -//// Old has one type, K2; new has J2 and K2. -// both -type K2 int - -// old -type J2 = K2 - -// new -// i K2: changed from K2 to K2 -type J2 K2 // old K2 corresponds with new J2 -// old K2 also corresponds with new K2: problem - -// both -type k3 int - -var Vj3 j3 // expose j3 - -// old -type j3 = k3 - -// new -// OK: k3 isn't exposed -type j3 k3 - -// both -type k4 int - -var Vj4 j4 // expose j4 -var VK4 k4 // expose k4 - -// old -type j4 = k4 - -// new -// i Vj4: changed from k4 to j4 -// e.g. p.Vj4 = p.Vk4 -type j4 k4 diff --git a/internal/astutil/comment.go b/internal/astutil/comment.go new file mode 100644 index 00000000000..192d6430de0 --- /dev/null +++ b/internal/astutil/comment.go @@ -0,0 +1,28 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "go/ast" + "strings" +) + +// Deprecation returns the paragraph of the doc comment that starts with the +// conventional "Deprecation: " marker, as defined by +// https://go.dev/wiki/Deprecated, or "" if the documented symbol is not +// deprecated. +func Deprecation(doc *ast.CommentGroup) string { + for _, p := range strings.Split(doc.Text(), "\n\n") { + // There is still some ambiguity for deprecation message. This function + // only returns the paragraph introduced by "Deprecated: ". More + // information related to the deprecation may follow in additional + // paragraphs, but the deprecation message should be able to stand on + // its own. See golang/go#38743. + if strings.HasPrefix(p, "Deprecated: ") { + return p + } + } + return "" +} diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 9f0b906f1c2..1052f65acfb 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -15,6 +15,7 @@ package cursor import ( + "fmt" "go/ast" "go/token" "iter" @@ -22,6 +23,7 @@ import ( "slices" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil/edge" ) // A Cursor represents an [ast.Node]. It is immutable. @@ -157,13 +159,6 @@ func (c Cursor) Inspect(types []ast.Node, f func(c Cursor, push bool) (descend b // must be empty. // // Stack must not be called on the Root node. -// -// TODO(adonovan): perhaps this should be replaced by: -// -// func (Cursor) Ancestors(filter []ast.Node) iter.Seq[Cursor] -// -// returning a filtering iterator up the parent chain. -// This finesses the question of allocation entirely. 
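// Editor's note: a minimal, hypothetical usage sketch (not part of this patch)
// for the astutil.Deprecation helper added above in internal/astutil/comment.go.
// Because that package is internal, the sketch copies its small logic locally;
// the source text and names below are invented for illustration.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"strings"
)

// deprecation mirrors astutil.Deprecation: it returns the doc-comment
// paragraph that begins with the conventional "Deprecated: " marker
// (see https://go.dev/wiki/Deprecated), or "" if there is none.
func deprecation(doc *ast.CommentGroup) string {
	for _, p := range strings.Split(doc.Text(), "\n\n") {
		if strings.HasPrefix(p, "Deprecated: ") {
			return p
		}
	}
	return ""
}

func main() {
	const src = `package p

// Shout returns s in upper case.
//
// Deprecated: Use strings.ToUpper instead.
func Shout(s string) string { return s }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	for _, decl := range f.Decls {
		if fn, ok := decl.(*ast.FuncDecl); ok {
			if msg := deprecation(fn.Doc); msg != "" {
				fmt.Printf("%s is deprecated: %q\n", fn.Name.Name, msg)
			}
		}
	}
}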
func (c Cursor) Stack(stack []Cursor) []Cursor { if len(stack) > 0 { panic("stack is non-empty") @@ -172,14 +167,37 @@ func (c Cursor) Stack(stack []Cursor) []Cursor { panic("Cursor.Stack called on Root node") } - events := c.events() - for i := c.index; i >= 0; i = events[i].parent { - stack = append(stack, Cursor{c.in, i}) - } + stack = append(stack, c) + stack = slices.AppendSeq(stack, c.Ancestors()) slices.Reverse(stack) return stack } +// Ancestors returns an iterator over the ancestors of the current +// node, starting with [Cursor.Parent]. +// +// Ancestors must not be called on the Root node (whose [Cursor.Node] returns nil). +// +// The types argument, if non-empty, enables type-based filtering of +// events: the sequence includes only ancestors whose type matches an +// element of the types slice. +func (c Cursor) Ancestors(types ...ast.Node) iter.Seq[Cursor] { + if c.index < 0 { + panic("Cursor.Ancestors called on Root node") + } + + mask := maskOf(types) + + return func(yield func(Cursor) bool) { + events := c.events() + for i := events[c.index].parent; i >= 0; i = events[i].parent { + if events[i].typ&mask != 0 && !yield(Cursor{c.in, i}) { + break + } + } + } +} + // Parent returns the parent of the current node. // // Parent must not be called on the Root node (whose [Cursor.Node] returns nil). @@ -191,12 +209,61 @@ func (c Cursor) Parent() Cursor { return Cursor{c.in, c.events()[c.index].parent} } -// NextSibling returns the cursor for the next sibling node in the -// same list (for example, of files, decls, specs, statements, fields, -// or expressions) as the current node. It returns zero if the node is -// the last node in the list, or is not part of a list. +// Edge returns the identity of the field in the parent node +// that holds this cursor's node, and if it is a list, the index within it. +// +// For example, f(x, y) is a CallExpr whose three children are Idents. +// f has edge kind [edge.CallExpr_Fun] and index -1. +// x and y have kind [edge.CallExpr_Args] and indices 0 and 1, respectively. +// +// Edge must not be called on the Root node (whose [Cursor.Node] returns nil). +// +// If called on a child of the Root node, it returns ([edge.Invalid], -1). +func (c Cursor) Edge() (edge.Kind, int) { + if c.index < 0 { + panic("Cursor.Edge called on Root node") + } + events := c.events() + pop := events[c.index].index + return unpackEdgeKindAndIndex(events[pop].parent) +} + +// Child returns the cursor for n, which must be a direct child of c's Node. +// +// Child must not be called on the Root node (whose [Cursor.Node] returns nil). +func (c Cursor) Child(n ast.Node) Cursor { + if c.index < 0 { + panic("Cursor.Child called on Root node") + } + + if false { + // reference implementation + for child := range c.Children() { + if child.Node() == n { + return child + } + } + + } else { + // optimized implementation + events := c.events() + for i := c.index + 1; events[i].index > i; i = events[i].index + 1 { + if events[i].node == n { + return Cursor{c.in, i} + } + } + } + panic(fmt.Sprintf("Child(%T): not a child of %v", n, c)) +} + +// NextSibling returns the cursor for the next sibling node in the same list +// (for example, of files, decls, specs, statements, fields, or expressions) as +// the current node. It returns (zero, false) if the node is the last node in +// the list, or is not part of a list. // // NextSibling must not be called on the Root node. +// +// See note at [Cursor.Children]. 
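// Editor's note: an illustrative sketch (not part of this patch) of the new
// Ancestors, Edge, and Child methods defined above. It assumes it is built
// inside x/tools, where the internal cursor and edge packages are importable;
// the source text is invented.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
	"golang.org/x/tools/internal/astutil/cursor"
	"golang.org/x/tools/internal/astutil/edge"
)

func main() {
	const src = `package p
func f() { g(1) }
func g(x int) int { return g(x - 1) }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{f})

	for cur := range cursor.Root(in).Preorder((*ast.CallExpr)(nil)) {
		call := cur.Node().(*ast.CallExpr)

		// Ancestors yields the parent chain innermost-first; the optional
		// type filter keeps only *ast.FuncDecl cursors.
		for anc := range cur.Ancestors((*ast.FuncDecl)(nil)) {
			fmt.Printf("call to %s at %v is inside func %s\n",
				call.Fun.(*ast.Ident).Name,
				fset.Position(call.Pos()),
				anc.Node().(*ast.FuncDecl).Name.Name)
			break // the innermost enclosing function is enough here
		}

		// Edge reports which field of the parent holds a given child.
		if k, _ := cur.Child(call.Fun).Edge(); k == edge.CallExpr_Fun {
			fmt.Println("  the callee occupies the CallExpr.Fun edge")
		}
	}
}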
func (c Cursor) NextSibling() (Cursor, bool) { if c.index < 0 { panic("Cursor.NextSibling called on Root node") @@ -218,6 +285,8 @@ func (c Cursor) NextSibling() (Cursor, bool) { // the first node in the list, or is not part of a list. // // It must not be called on the Root node. +// +// See note at [Cursor.Children]. func (c Cursor) PrevSibling() (Cursor, bool) { if c.index < 0 { panic("Cursor.PrevSibling called on Root node") @@ -266,6 +335,29 @@ func (c Cursor) LastChild() (Cursor, bool) { // Children returns an iterator over the direct children of the // current node, if any. +// +// When using Children, NextChild, and PrevChild, bear in mind that a +// Node's children may come from different fields, some of which may +// be lists of nodes without a distinguished intervening container +// such as [ast.BlockStmt]. +// +// For example, [ast.CaseClause] has a field List of expressions and a +// field Body of statements, so the children of a CaseClause are a mix +// of expressions and statements. Other nodes that have "uncontained" +// list fields include: +// +// - [ast.ValueSpec] (Names, Values) +// - [ast.CompositeLit] (Type, Elts) +// - [ast.IndexListExpr] (X, Indices) +// - [ast.CallExpr] (Fun, Args) +// - [ast.AssignStmt] (Lhs, Rhs) +// +// So, do not assume that the previous sibling of an ast.Stmt is also +// an ast.Stmt, or if it is, that they are executed sequentially, +// unless you have established that, say, its parent is a BlockStmt +// or its [Cursor.Edge] is [edge.BlockStmt_List]. +// For example, given "for S1; ; S2 {}", the predecessor of S2 is S1, +// even though they are not executed in sequence. func (c Cursor) Children() iter.Seq[Cursor] { return func(yield func(Cursor) bool) { c, ok := c.FirstChild() diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index e578fa300a6..01f791f2833 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -15,12 +15,14 @@ import ( "iter" "log" "path/filepath" + "reflect" "slices" "strings" "testing" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" ) // net/http package @@ -126,6 +128,13 @@ func g() { for curFunc := range cursor.Root(inspect).Preorder(funcDecls...) { _ = curFunc.Node().(*ast.FuncDecl) + + // Check edge and index. + if e, idx := curFunc.Edge(); e != edge.File_Decls || idx != nfuncs { + t.Errorf("%v.Edge() = (%v, %v), want edge.File_Decls, %d", + curFunc, e, idx, nfuncs) + } + nfuncs++ stack := curFunc.Stack(nil) @@ -152,6 +161,13 @@ func g() { if got, want := fmt.Sprint(stack), "[*ast.File *ast.FuncDecl *ast.BlockStmt *ast.ExprStmt *ast.CallExpr]"; got != want { t.Errorf("curCall.Stack() = %q, want %q", got, want) } + + // Ancestors = Reverse(Stack[:last]). + stack = stack[:len(stack)-1] + slices.Reverse(stack) + if got, want := slices.Collect(curCall.Ancestors()), stack; !reflect.DeepEqual(got, want) { + t.Errorf("Ancestors = %v, Reverse(Stack - last element) = %v", got, want) + } } // nested Inspect traversal @@ -320,6 +336,57 @@ func TestCursor_FindNode(t *testing.T) { // TODO(adonovan): FindPos needs a test (not just a benchmark). } +func TestCursor_Edge(t *testing.T) { + root := cursor.Root(netInspect) + for cur := range root.Preorder() { + if cur == root { + continue // root node + } + + e, idx := cur.Edge() + parent := cur.Parent() + + // ast.File, child of root? 
+ if parent.Node() == nil { + if e != edge.Invalid || idx != -1 { + t.Errorf("%v.Edge = (%v, %d), want (Invalid, -1)", cur, e, idx) + } + continue + } + + // Check Edge.NodeType matches type of Parent.Node. + if e.NodeType() != reflect.TypeOf(parent.Node()) { + t.Errorf("Edge.NodeType = %v, Parent.Node has type %T", + e.NodeType(), parent.Node()) + } + + // Check consistency of c.Edge.Get(c.Parent().Node()) == c.Node(). + if got := e.Get(parent.Node(), idx); got != cur.Node() { + t.Errorf("cur=%v@%s: %s.Get(cur.Parent().Node(), %d) = %T@%s, want cur.Node()", + cur, netFset.Position(cur.Node().Pos()), e, idx, got, netFset.Position(got.Pos())) + } + + // Check that reflection on the parent finds the current node. + fv := reflect.ValueOf(parent.Node()).Elem().FieldByName(e.FieldName()) + if idx >= 0 { + fv = fv.Index(idx) // element of []ast.Node + } + if fv.Kind() == reflect.Interface { + fv = fv.Elem() // e.g. ast.Expr -> *ast.Ident + } + got := fv.Interface().(ast.Node) + if got != cur.Node() { + t.Errorf("%v.Edge = (%v, %d); FieldName/Index reflection gave %T@%s, not original node", + cur, e, idx, got, netFset.Position(got.Pos())) + } + + // Check that Cursor.Child is the reverse of Parent. + if cur.Parent().Child(cur.Node()) != cur { + t.Errorf("Cursor.Parent.Child = %v, want %v", cur.Parent().Child(cur.Node()), cur) + } + } +} + func is[T any](x any) bool { _, ok := x.(T) return ok @@ -381,6 +448,15 @@ func BenchmarkInspectCalls(b *testing.B) { // And if the calls to Stack are very selective, // or are replaced by 2 calls to Parent, it runs // 27% faster than WithStack. + // + // But the purpose of inspect.WithStack is not to obtain the + // stack on every node, but to perform a traversal in which + // one has the _option_ to access the stack if it should be + // needed, but the need is rare and usually only for a small + // portion. Arguably, because Cursor traversals always + // provide, at no extra cost, the option to access the + // complete stack, the right comparison is the plain Cursor + // benchmark below. b.Run("CursorStack", func(b *testing.B) { var ncalls int for range b.N { @@ -392,6 +468,28 @@ func BenchmarkInspectCalls(b *testing.B) { } } }) + + b.Run("Cursor", func(b *testing.B) { + var ncalls int + for range b.N { + for cur := range cursor.Root(inspect).Preorder(callExprs...) { + _ = cur.Node().(*ast.CallExpr) + ncalls++ + } + } + }) + + b.Run("CursorAncestors", func(b *testing.B) { + var ncalls int + for range b.N { + for cur := range cursor.Root(inspect).Preorder(callExprs...) { + _ = cur.Node().(*ast.CallExpr) + for range cur.Ancestors() { + } + ncalls++ + } + } + }) } // This benchmark compares methods for finding a known node in a tree. diff --git a/internal/astutil/cursor/hooks.go b/internal/astutil/cursor/hooks.go index 47aaaae37e0..8b61f5ddc11 100644 --- a/internal/astutil/cursor/hooks.go +++ b/internal/astutil/cursor/hooks.go @@ -11,6 +11,7 @@ import ( _ "unsafe" // for go:linkname "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil/edge" ) // This file defines backdoor access to inspector.
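// Editor's note: a small runnable sketch (not part of this patch) of the
// "uncontained list fields" caveat documented on Cursor.Children above: the
// direct children of an *ast.CaseClause mix expressions (List) and statements
// (Body), and Edge tells them apart. Assumes it is built inside x/tools; the
// source text is invented.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
	"golang.org/x/tools/internal/astutil/cursor"
)

func main() {
	const src = `package p
func f(x int) {
	switch x {
	case 1, 2:
		println("small")
	}
}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{f})

	for cur := range cursor.Root(in).Preorder((*ast.CaseClause)(nil)) {
		for child := range cur.Children() {
			k, idx := child.Edge()
			// Prints a mix of CaseClause.List (the 1, 2 expressions) and
			// CaseClause.Body (the println statement).
			fmt.Printf("%v[%d]: %T\n", k, idx, child.Node())
		}
	}
}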
@@ -21,7 +22,7 @@ type event struct { node ast.Node typ uint64 // typeOf(node) on push event, or union of typ strictly between push and pop events on pop events index int32 // index of corresponding push or pop event (relative to this event's index, +ve=push, -ve=pop) - parent int32 // index of parent's push node (defined for push nodes only) + parent int32 // index of parent's push node (push nodes only); or edge and index, bit packed (pop nodes only) } //go:linkname maskOf golang.org/x/tools/go/ast/inspector.maskOf @@ -30,4 +31,7 @@ func maskOf(nodes []ast.Node) uint64 //go:linkname events golang.org/x/tools/go/ast/inspector.events func events(in *inspector.Inspector) []event +//go:linkname unpackEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.unpackEdgeKindAndIndex +func unpackEdgeKindAndIndex(int32) (edge.Kind, int) + func (c Cursor) events() []event { return events(c.in) } diff --git a/internal/astutil/edge/edge.go b/internal/astutil/edge/edge.go new file mode 100644 index 00000000000..4f6ccfd6e5e --- /dev/null +++ b/internal/astutil/edge/edge.go @@ -0,0 +1,295 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package edge defines identifiers for each field of an ast.Node +// struct type that refers to another Node. +package edge + +import ( + "fmt" + "go/ast" + "reflect" +) + +// A Kind describes a field of an ast.Node struct. +type Kind uint8 + +// String returns a description of the edge kind. +func (k Kind) String() string { + if k == Invalid { + return "" + } + info := fieldInfos[k] + return fmt.Sprintf("%v.%s", info.nodeType.Elem().Name(), info.name) +} + +// NodeType returns the pointer-to-struct type of the ast.Node implementation. +func (k Kind) NodeType() reflect.Type { return fieldInfos[k].nodeType } + +// FieldName returns the name of the field. +func (k Kind) FieldName() string { return fieldInfos[k].name } + +// FieldType returns the declared type of the field. +func (k Kind) FieldType() reflect.Type { return fieldInfos[k].fieldType } + +// Get returns the direct child of n identified by (k, idx). +// n's type must match k.NodeType(). +// idx must be a valid slice index, or -1 for a non-slice. +func (k Kind) Get(n ast.Node, idx int) ast.Node { + if k.NodeType() != reflect.TypeOf(n) { + panic(fmt.Sprintf("%v.Get(%T): invalid node type", k, n)) + } + v := reflect.ValueOf(n).Elem().Field(fieldInfos[k].index) + if idx != -1 { + v = v.Index(idx) // asserts valid index + } else { + // (The type assertion below asserts that v is not a slice.) + } + return v.Interface().(ast.Node) // may be nil +} + +const ( + Invalid Kind = iota // for nodes at the root of the traversal + + // Kinds are sorted alphabetically. + // Numbering is not stable. 
+ // Each is named Type_Field, where Type is the + // ast.Node struct type and Field is the name of the field + + ArrayType_Elt + ArrayType_Len + AssignStmt_Lhs + AssignStmt_Rhs + BinaryExpr_X + BinaryExpr_Y + BlockStmt_List + BranchStmt_Label + CallExpr_Args + CallExpr_Fun + CaseClause_Body + CaseClause_List + ChanType_Value + CommClause_Body + CommClause_Comm + CommentGroup_List + CompositeLit_Elts + CompositeLit_Type + DeclStmt_Decl + DeferStmt_Call + Ellipsis_Elt + ExprStmt_X + FieldList_List + Field_Comment + Field_Doc + Field_Names + Field_Tag + Field_Type + File_Decls + File_Doc + File_Name + ForStmt_Body + ForStmt_Cond + ForStmt_Init + ForStmt_Post + FuncDecl_Body + FuncDecl_Doc + FuncDecl_Name + FuncDecl_Recv + FuncDecl_Type + FuncLit_Body + FuncLit_Type + FuncType_Params + FuncType_Results + FuncType_TypeParams + GenDecl_Doc + GenDecl_Specs + GoStmt_Call + IfStmt_Body + IfStmt_Cond + IfStmt_Else + IfStmt_Init + ImportSpec_Comment + ImportSpec_Doc + ImportSpec_Name + ImportSpec_Path + IncDecStmt_X + IndexExpr_Index + IndexExpr_X + IndexListExpr_Indices + IndexListExpr_X + InterfaceType_Methods + KeyValueExpr_Key + KeyValueExpr_Value + LabeledStmt_Label + LabeledStmt_Stmt + MapType_Key + MapType_Value + ParenExpr_X + RangeStmt_Body + RangeStmt_Key + RangeStmt_Value + RangeStmt_X + ReturnStmt_Results + SelectStmt_Body + SelectorExpr_Sel + SelectorExpr_X + SendStmt_Chan + SendStmt_Value + SliceExpr_High + SliceExpr_Low + SliceExpr_Max + SliceExpr_X + StarExpr_X + StructType_Fields + SwitchStmt_Body + SwitchStmt_Init + SwitchStmt_Tag + TypeAssertExpr_Type + TypeAssertExpr_X + TypeSpec_Comment + TypeSpec_Doc + TypeSpec_Name + TypeSpec_Type + TypeSpec_TypeParams + TypeSwitchStmt_Assign + TypeSwitchStmt_Body + TypeSwitchStmt_Init + UnaryExpr_X + ValueSpec_Comment + ValueSpec_Doc + ValueSpec_Names + ValueSpec_Type + ValueSpec_Values + + maxKind +) + +// Assert that the encoding fits in 7 bits, +// as the inspector relies on this. +// (We are currently at 104.) 
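// Editor's note: a brief hypothetical sketch (not part of this patch) of the
// Kind metadata accessors defined above: Get retrieves a child by (kind,
// index), while NodeType and FieldName describe the field itself. Assumes it
// is built inside x/tools so the internal edge package is importable.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"

	"golang.org/x/tools/internal/astutil/edge"
)

func main() {
	e, err := parser.ParseExpr(`f(x, y)`)
	if err != nil {
		panic(err)
	}
	call := e.(*ast.CallExpr)

	// CallExpr_Fun is a non-slice field, so the index must be -1;
	// CallExpr_Args is a slice field, so a valid element index is required.
	fun := edge.CallExpr_Fun.Get(call, -1)
	arg1 := edge.CallExpr_Args.Get(call, 1)

	fmt.Println(edge.CallExpr_Fun,        // "CallExpr.Fun"
		edge.CallExpr_Fun.NodeType(), // *ast.CallExpr
		edge.CallExpr_Fun.FieldName()) // "Fun"
	fmt.Printf("Fun = %s, Args[1] = %s\n",
		fun.(*ast.Ident).Name, arg1.(*ast.Ident).Name)
}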
+var _ = [1 << 7]struct{}{}[maxKind] + +type fieldInfo struct { + nodeType reflect.Type // pointer-to-struct type of ast.Node implementation + name string + index int + fieldType reflect.Type +} + +func info[N ast.Node](fieldName string) fieldInfo { + nodePtrType := reflect.TypeFor[N]() + f, ok := nodePtrType.Elem().FieldByName(fieldName) + if !ok { + panic(fieldName) + } + return fieldInfo{nodePtrType, fieldName, f.Index[0], f.Type} +} + +var fieldInfos = [...]fieldInfo{ + Invalid: {}, + ArrayType_Elt: info[*ast.ArrayType]("Elt"), + ArrayType_Len: info[*ast.ArrayType]("Len"), + AssignStmt_Lhs: info[*ast.AssignStmt]("Lhs"), + AssignStmt_Rhs: info[*ast.AssignStmt]("Rhs"), + BinaryExpr_X: info[*ast.BinaryExpr]("X"), + BinaryExpr_Y: info[*ast.BinaryExpr]("Y"), + BlockStmt_List: info[*ast.BlockStmt]("List"), + BranchStmt_Label: info[*ast.BranchStmt]("Label"), + CallExpr_Args: info[*ast.CallExpr]("Args"), + CallExpr_Fun: info[*ast.CallExpr]("Fun"), + CaseClause_Body: info[*ast.CaseClause]("Body"), + CaseClause_List: info[*ast.CaseClause]("List"), + ChanType_Value: info[*ast.ChanType]("Value"), + CommClause_Body: info[*ast.CommClause]("Body"), + CommClause_Comm: info[*ast.CommClause]("Comm"), + CommentGroup_List: info[*ast.CommentGroup]("List"), + CompositeLit_Elts: info[*ast.CompositeLit]("Elts"), + CompositeLit_Type: info[*ast.CompositeLit]("Type"), + DeclStmt_Decl: info[*ast.DeclStmt]("Decl"), + DeferStmt_Call: info[*ast.DeferStmt]("Call"), + Ellipsis_Elt: info[*ast.Ellipsis]("Elt"), + ExprStmt_X: info[*ast.ExprStmt]("X"), + FieldList_List: info[*ast.FieldList]("List"), + Field_Comment: info[*ast.Field]("Comment"), + Field_Doc: info[*ast.Field]("Doc"), + Field_Names: info[*ast.Field]("Names"), + Field_Tag: info[*ast.Field]("Tag"), + Field_Type: info[*ast.Field]("Type"), + File_Decls: info[*ast.File]("Decls"), + File_Doc: info[*ast.File]("Doc"), + File_Name: info[*ast.File]("Name"), + ForStmt_Body: info[*ast.ForStmt]("Body"), + ForStmt_Cond: info[*ast.ForStmt]("Cond"), + ForStmt_Init: info[*ast.ForStmt]("Init"), + ForStmt_Post: info[*ast.ForStmt]("Post"), + FuncDecl_Body: info[*ast.FuncDecl]("Body"), + FuncDecl_Doc: info[*ast.FuncDecl]("Doc"), + FuncDecl_Name: info[*ast.FuncDecl]("Name"), + FuncDecl_Recv: info[*ast.FuncDecl]("Recv"), + FuncDecl_Type: info[*ast.FuncDecl]("Type"), + FuncLit_Body: info[*ast.FuncLit]("Body"), + FuncLit_Type: info[*ast.FuncLit]("Type"), + FuncType_Params: info[*ast.FuncType]("Params"), + FuncType_Results: info[*ast.FuncType]("Results"), + FuncType_TypeParams: info[*ast.FuncType]("TypeParams"), + GenDecl_Doc: info[*ast.GenDecl]("Doc"), + GenDecl_Specs: info[*ast.GenDecl]("Specs"), + GoStmt_Call: info[*ast.GoStmt]("Call"), + IfStmt_Body: info[*ast.IfStmt]("Body"), + IfStmt_Cond: info[*ast.IfStmt]("Cond"), + IfStmt_Else: info[*ast.IfStmt]("Else"), + IfStmt_Init: info[*ast.IfStmt]("Init"), + ImportSpec_Comment: info[*ast.ImportSpec]("Comment"), + ImportSpec_Doc: info[*ast.ImportSpec]("Doc"), + ImportSpec_Name: info[*ast.ImportSpec]("Name"), + ImportSpec_Path: info[*ast.ImportSpec]("Path"), + IncDecStmt_X: info[*ast.IncDecStmt]("X"), + IndexExpr_Index: info[*ast.IndexExpr]("Index"), + IndexExpr_X: info[*ast.IndexExpr]("X"), + IndexListExpr_Indices: info[*ast.IndexListExpr]("Indices"), + IndexListExpr_X: info[*ast.IndexListExpr]("X"), + InterfaceType_Methods: info[*ast.InterfaceType]("Methods"), + KeyValueExpr_Key: info[*ast.KeyValueExpr]("Key"), + KeyValueExpr_Value: info[*ast.KeyValueExpr]("Value"), + LabeledStmt_Label: info[*ast.LabeledStmt]("Label"), + 
LabeledStmt_Stmt: info[*ast.LabeledStmt]("Stmt"), + MapType_Key: info[*ast.MapType]("Key"), + MapType_Value: info[*ast.MapType]("Value"), + ParenExpr_X: info[*ast.ParenExpr]("X"), + RangeStmt_Body: info[*ast.RangeStmt]("Body"), + RangeStmt_Key: info[*ast.RangeStmt]("Key"), + RangeStmt_Value: info[*ast.RangeStmt]("Value"), + RangeStmt_X: info[*ast.RangeStmt]("X"), + ReturnStmt_Results: info[*ast.ReturnStmt]("Results"), + SelectStmt_Body: info[*ast.SelectStmt]("Body"), + SelectorExpr_Sel: info[*ast.SelectorExpr]("Sel"), + SelectorExpr_X: info[*ast.SelectorExpr]("X"), + SendStmt_Chan: info[*ast.SendStmt]("Chan"), + SendStmt_Value: info[*ast.SendStmt]("Value"), + SliceExpr_High: info[*ast.SliceExpr]("High"), + SliceExpr_Low: info[*ast.SliceExpr]("Low"), + SliceExpr_Max: info[*ast.SliceExpr]("Max"), + SliceExpr_X: info[*ast.SliceExpr]("X"), + StarExpr_X: info[*ast.StarExpr]("X"), + StructType_Fields: info[*ast.StructType]("Fields"), + SwitchStmt_Body: info[*ast.SwitchStmt]("Body"), + SwitchStmt_Init: info[*ast.SwitchStmt]("Init"), + SwitchStmt_Tag: info[*ast.SwitchStmt]("Tag"), + TypeAssertExpr_Type: info[*ast.TypeAssertExpr]("Type"), + TypeAssertExpr_X: info[*ast.TypeAssertExpr]("X"), + TypeSpec_Comment: info[*ast.TypeSpec]("Comment"), + TypeSpec_Doc: info[*ast.TypeSpec]("Doc"), + TypeSpec_Name: info[*ast.TypeSpec]("Name"), + TypeSpec_Type: info[*ast.TypeSpec]("Type"), + TypeSpec_TypeParams: info[*ast.TypeSpec]("TypeParams"), + TypeSwitchStmt_Assign: info[*ast.TypeSwitchStmt]("Assign"), + TypeSwitchStmt_Body: info[*ast.TypeSwitchStmt]("Body"), + TypeSwitchStmt_Init: info[*ast.TypeSwitchStmt]("Init"), + UnaryExpr_X: info[*ast.UnaryExpr]("X"), + ValueSpec_Comment: info[*ast.ValueSpec]("Comment"), + ValueSpec_Doc: info[*ast.ValueSpec]("Doc"), + ValueSpec_Names: info[*ast.ValueSpec]("Names"), + ValueSpec_Type: info[*ast.ValueSpec]("Type"), + ValueSpec_Values: info[*ast.ValueSpec]("Values"), +} diff --git a/internal/astutil/util.go b/internal/astutil/util.go new file mode 100644 index 00000000000..849d45d8539 --- /dev/null +++ b/internal/astutil/util.go @@ -0,0 +1,59 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + "unicode/utf8" +) + +// RangeInStringLiteral calculates the positional range within a string literal +// corresponding to the specified start and end byte offsets within the logical string. +func RangeInStringLiteral(lit *ast.BasicLit, start, end int) (token.Pos, token.Pos, error) { + startPos, err := PosInStringLiteral(lit, start) + if err != nil { + return 0, 0, fmt.Errorf("start: %v", err) + } + endPos, err := PosInStringLiteral(lit, end) + if err != nil { + return 0, 0, fmt.Errorf("end: %v", err) + } + return startPos, endPos, nil +} + +// PosInStringLiteral returns the position within a string literal +// corresponding to the specified byte offset within the logical +// string that it denotes. 
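// Editor's note: an illustrative sketch (not part of this patch), written as
// if it were a small example in this package, showing how a byte offset in
// the logical string maps back to a source position via the function below.
// The literal and positions are worked out by hand and are assumptions of
// this example only.
func examplePosInStringLiteral() {
	lit := &ast.BasicLit{
		Kind:     token.STRING,
		ValuePos: 10,               // position of the opening quote
		Value:    `"a\U0001F600c"`, // denotes the logical string "a😀c"
	}
	// 'c' begins at logical byte offset 1 ("a") + 4 (UTF-8 length of the
	// emoji) = 5. In the source text it sits after the opening quote (1
	// byte), "a" (1 byte), and the 10-byte escape `\U0001F600`.
	pos, err := PosInStringLiteral(lit, 5)
	fmt.Println(pos, err) // 22 <nil>, i.e. ValuePos + 12
}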
+func PosInStringLiteral(lit *ast.BasicLit, offset int) (token.Pos, error) { + raw := lit.Value + + value, err := strconv.Unquote(raw) + if err != nil { + return 0, err + } + if !(0 <= offset && offset <= len(value)) { + return 0, fmt.Errorf("invalid offset") + } + + // remove quotes + quote := raw[0] // '"' or '`' + raw = raw[1 : len(raw)-1] + + var ( + i = 0 // byte index within logical value + pos = lit.ValuePos + 1 // position within literal + ) + for raw != "" && i < offset { + r, _, rest, _ := strconv.UnquoteChar(raw, quote) // can't fail + sz := len(raw) - len(rest) // length of literal char in raw bytes + pos += token.Pos(sz) + raw = raw[sz:] + i += utf8.RuneLen(r) + } + return pos, nil +} diff --git a/internal/diff/merge.go b/internal/diff/merge.go new file mode 100644 index 00000000000..eeae98adf76 --- /dev/null +++ b/internal/diff/merge.go @@ -0,0 +1,81 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diff + +import ( + "slices" +) + +// Merge merges two valid, ordered lists of edits. +// It returns zero if there was a conflict. +// +// If corresponding edits in x and y are identical, +// they are coalesced in the result. +// +// If x and y both provide different insertions at the same point, +// the insertions from x will be first in the result. +// +// TODO(adonovan): this algorithm could be improved, for example by +// working harder to coalesce non-identical edits that share a common +// deletion or common prefix of insertion (see the tests). +// Survey the academic literature for insights. +func Merge(x, y []Edit) ([]Edit, bool) { + // Make a defensive (premature) copy of the arrays. + x = slices.Clone(x) + y = slices.Clone(y) + + var merged []Edit + add := func(edit Edit) { + merged = append(merged, edit) + } + var xi, yi int + for xi < len(x) && yi < len(y) { + px := &x[xi] + py := &y[yi] + + if *px == *py { + // x and y are identical: coalesce. + add(*px) + xi++ + yi++ + + } else if px.End <= py.Start { + // x is entirely before y, + // or an insertion at start of y. + add(*px) + xi++ + + } else if py.End <= px.Start { + // y is entirely before x, + // or an insertion at start of x. + add(*py) + yi++ + + } else if px.Start < py.Start { + // x is partly before y: + // split it into a deletion and an edit. + add(Edit{px.Start, py.Start, ""}) + px.Start = py.Start + + } else if py.Start < px.Start { + // y is partly before x: + // split it into a deletion and an edit. + add(Edit{py.Start, px.Start, ""}) + py.Start = px.Start + + } else { + // x and y are unequal non-insertions + // at the same point: conflict. + return nil, false + } + } + for ; xi < len(x); xi++ { + add(x[xi]) + } + for ; yi < len(y); yi++ { + add(y[yi]) + } + return merged, true +} diff --git a/internal/diff/merge_test.go b/internal/diff/merge_test.go new file mode 100644 index 00000000000..637a13abd46 --- /dev/null +++ b/internal/diff/merge_test.go @@ -0,0 +1,65 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diff_test + +import ( + "testing" + + "golang.org/x/tools/internal/diff" +) + +func TestMerge(t *testing.T) { + // For convenience, we test Merge using strings, not sequences + // of edits, though this does put us at the mercy of the diff + // algorithm. + for _, test := range []struct { + base, x, y string + want string // "!" 
=> conflict + }{ + // independent insertions + {"abcdef", "abXcdef", "abcdeYf", "abXcdeYf"}, + // independent deletions + {"abcdef", "acdef", "abcdf", "acdf"}, + // colocated insertions (X first) + {"abcdef", "abcXdef", "abcYdef", "abcXYdef"}, + // colocated identical insertions (coalesced) + {"abcdef", "abcXdef", "abcXdef", "abcXdef"}, + // colocated insertions with common prefix (X first) + // TODO(adonovan): would "abcXYdef" be better? + // i.e. should we dissect the insertions? + {"abcdef", "abcXdef", "abcXYdef", "abcXXYdef"}, + // mix of identical and independent insertions (X first) + {"abcdef", "aIbcdXef", "aIbcdYef", "aIbcdXYef"}, + // independent deletions + {"abcdef", "def", "abc", ""}, + // overlapping deletions: conflict + {"abcdef", "adef", "abef", "!"}, + // overlapping deletions with distinct insertions, X first + {"abcdef", "abXef", "abcYf", "!"}, + // overlapping deletions with distinct insertions, Y first + {"abcdef", "abcXf", "abYef", "!"}, + // overlapping deletions with common insertions + {"abcdef", "abXef", "abcXf", "!"}, + // trailing insertions in X (observe X bias) + {"abcdef", "aXbXcXdXeXfX", "aYbcdef", "aXYbXcXdXeXfX"}, + // trailing insertions in Y (observe X bias) + {"abcdef", "aXbcdef", "aYbYcYdYeYfY", "aXYbYcYdYeYfY"}, + } { + dx := diff.Strings(test.base, test.x) + dy := diff.Strings(test.base, test.y) + got := "!" // conflict + if dz, ok := diff.Merge(dx, dy); ok { + var err error + got, err = diff.Apply(test.base, dz) + if err != nil { + t.Errorf("Merge(%q, %q, %q) produced invalid edits %v: %v", test.base, test.x, test.y, dz, err) + continue + } + } + if test.want != got { + t.Errorf("base=%q x=%q y=%q: got %q, want %q", test.base, test.x, test.y, got, test.want) + } + } +} diff --git a/internal/event/export/metric/info.go b/internal/event/export/metric/info.go index a178343b2ef..5662fbeaef6 100644 --- a/internal/event/export/metric/info.go +++ b/internal/event/export/metric/info.go @@ -31,7 +31,7 @@ type HistogramInt64 struct { Buckets []int64 } -// HistogramFloat64 represents the construction information for an float64 histogram metric. +// HistogramFloat64 represents the construction information for a float64 histogram metric. type HistogramFloat64 struct { // Name is the unique name of this metric. Name string diff --git a/internal/expect/extract.go b/internal/expect/extract.go index db6b66aaf21..1fb4349c48e 100644 --- a/internal/expect/extract.go +++ b/internal/expect/extract.go @@ -21,7 +21,7 @@ import ( const commentStart = "@" const commentStartLen = len(commentStart) -// Identifier is the type for an identifier in an Note argument list. +// Identifier is the type for an identifier in a Note argument list. type Identifier string // Parse collects all the notes present in a file. diff --git a/internal/fmtstr/main.go b/internal/fmtstr/main.go new file mode 100644 index 00000000000..7fcbfdbbf2c --- /dev/null +++ b/internal/fmtstr/main.go @@ -0,0 +1,94 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore + +// The fmtstr command parses the format strings of calls to selected +// printf-like functions in the specified source file, and prints the +// formatting operations and their operands. +// +// It is intended only for debugging and is not a supported interface. 
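// Editor's note: a hypothetical invocation (not part of this patch). The file
// name, positions, and format string are invented; the output shape follows
// the Printf calls in main below, which print each operation's text and the
// expression supplying its argument:
//
//	$ go run internal/fmtstr/main.go testdata/hello.go
//	testdata/hello.go:6:13: fmt.Printf("%-8s scored %3.2f\n", ...)
//		"%-8s"	name
//		"%3.2f"	score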
+package main + +import ( + "flag" + "fmt" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "log" + "strconv" + "strings" + + "golang.org/x/tools/internal/fmtstr" +) + +func main() { + log.SetPrefix("fmtstr: ") + log.SetFlags(0) + flag.Parse() + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, flag.Args()[0], nil, 0) + if err != nil { + log.Fatal(err) + } + + functions := map[string]int{ + "fmt.Errorf": 0, + "fmt.Fprintf": 1, + "fmt.Printf": 0, + "fmt.Sprintf": 0, + "log.Printf": 0, + } + + ast.Inspect(f, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok && !call.Ellipsis.IsValid() { + if sel, ok := call.Fun.(*ast.SelectorExpr); ok && is[*ast.Ident](sel.X) { + name := sel.X.(*ast.Ident).Name + "." + sel.Sel.Name // e.g. "fmt.Printf" + if fmtstrIndex, ok := functions[name]; ok && + len(call.Args) > fmtstrIndex { + // Is it a string literal? + if fmtstrArg, ok := call.Args[fmtstrIndex].(*ast.BasicLit); ok && + fmtstrArg.Kind == token.STRING { + // Have fmt.Printf("format", ...) + format, _ := strconv.Unquote(fmtstrArg.Value) + + ops, err := fmtstr.Parse(format, 0) + if err != nil { + log.Printf("%s: %v", fset.Position(fmtstrArg.Pos()), err) + return true + } + + fmt.Printf("%s: %s(%s, ...)\n", + fset.Position(fmtstrArg.Pos()), + name, + fmtstrArg.Value) + for _, op := range ops { + // TODO(adonovan): show more detail. + fmt.Printf("\t%q\t%v\n", + op.Text, + formatNode(fset, call.Args[op.Verb.ArgIndex])) + } + } + } + } + } + return true + }) +} + +func is[T any](x any) bool { + _, ok := x.(T) + return ok +} + +func formatNode(fset *token.FileSet, n ast.Node) string { + var buf strings.Builder + if err := printer.Fprint(&buf, fset, n); err != nil { + return "" + } + return buf.String() +} diff --git a/internal/fmtstr/parse.go b/internal/fmtstr/parse.go new file mode 100644 index 00000000000..9ab264f45d6 --- /dev/null +++ b/internal/fmtstr/parse.go @@ -0,0 +1,370 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fmtstr defines a parser for format strings as used by [fmt.Printf]. +package fmtstr + +import ( + "fmt" + "strconv" + "strings" + "unicode/utf8" +) + +// Operation holds the parsed representation of a printf operation such as "%3.*[4]d". +// It is constructed by [Parse]. +type Operation struct { + Text string // full text of the operation, e.g. "%[2]*.3d" + Verb Verb // verb specifier, guaranteed to exist, e.g., 'd' in '%[1]d' + Range Range // the range of Text within the overall format string + Flags string // formatting flags, e.g. "-0" + Width Size // width specifier, e.g., '3' in '%3d' + Prec Size // precision specifier, e.g., '.4' in '%.4f' +} + +// Size describes an optional width or precision in a format operation. +// It may represent no value, a literal number, an asterisk, or an indexed asterisk. +type Size struct { + // At most one of these two fields is non-negative. + Fixed int // e.g. 4 from "%4d", otherwise -1 + Dynamic int // index of argument providing dynamic size (e.g. %*d or %[3]*d), otherwise -1 + + Index int // If the width or precision uses an indexed argument (e.g. 2 in %[2]*d), this is the index, otherwise -1 + Range Range // position of the size specifier within the operation +} + +// Verb represents the verb character of a format operation (e.g., 'd', 's', 'f'). +// It also includes positional information and any explicit argument indexing. 
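// Editor's note: a brief illustrative sketch (not part of this patch), written
// as if it were an example in this package, showing what Parse reports for a
// format string with a flag, a fixed width, an indexed dynamic width, and a
// precision. The format string and printed fields are assumptions of this
// example only.
func exampleParse() {
	// Pretend the format string was argument 0 of a Printf-like call, so
	// its operands start at argument 1.
	ops, err := Parse("%-8s %[2]*.3f", 0)
	if err != nil {
		panic(err)
	}
	for _, op := range ops {
		fmt.Printf("text=%q verb=%q flags=%q width=%+v prec=%+v argindex=%d\n",
			op.Text, op.Verb.Verb, op.Flags, op.Width, op.Prec, op.Verb.ArgIndex)
	}
}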
+type Verb struct { + Verb rune + Range Range // positional range of the verb in the format string + Index int // index of an indexed argument, (e.g. 2 in %[2]d), otherwise -1 + ArgIndex int // argument index (0-based) associated with this verb, relative to CallExpr +} + +// byte offsets of format string +type Range struct { + Start, End int +} + +// Parse takes a format string and its index in the printf-like call, +// parses out all format operations, returns a slice of parsed +// [Operation] which describes flags, width, precision, verb, and argument indexing, +// or an error if parsing fails. +// +// All error messages are in predicate form ("call has a problem") +// so that they may be affixed into a subject ("log.Printf "). +// +// The flags will only be a subset of ['#', '0', '+', '-', ' ']. +// It does not perform any validation of verbs, nor the +// existence of corresponding arguments (obviously it can't). The provided format string may differ +// from the one in CallExpr, such as a concatenated string or a string +// referred to by the argument in the CallExpr. +func Parse(format string, idx int) ([]*Operation, error) { + if !strings.Contains(format, "%") { + return nil, fmt.Errorf("call has arguments but no formatting directives") + } + + firstArg := idx + 1 // Arguments are immediately after format string. + argNum := firstArg + var operations []*Operation + for i, w := 0, 0; i < len(format); i += w { + w = 1 + if format[i] != '%' { + continue + } + state, err := parseOperation(format[i:], firstArg, argNum) + if err != nil { + return nil, err + } + + state.operation.addOffset(i) + operations = append(operations, state.operation) + + w = len(state.operation.Text) + // Do not waste an argument for '%'. + if state.operation.Verb.Verb != '%' { + argNum = state.argNum + 1 + } + } + return operations, nil +} + +// Internal parsing state to operation. +type state struct { + operation *Operation + firstArg int // index of the first argument after the format string + argNum int // which argument we're expecting to format now + hasIndex bool // whether the argument is indexed + index int // the encountered index + indexPos int // the encountered index's offset + indexPending bool // whether we have an indexed argument that has not resolved + nbytes int // number of bytes of the format string consumed +} + +// parseOperation parses one format operation starting at the given substring `format`, +// which should begin with '%'. It returns a fully populated state or an error +// if the operation is malformed. The firstArg and argNum parameters help determine how +// arguments map to this operation. +// +// Parse sequence: '%' -> flags -> {[N]* or width} -> .{[N]* or precision} -> [N] -> verb. +func parseOperation(format string, firstArg, argNum int) (*state, error) { + state := &state{ + operation: &Operation{ + Text: format, + Width: Size{ + Fixed: -1, + Dynamic: -1, + Index: -1, + }, + Prec: Size{ + Fixed: -1, + Dynamic: -1, + Index: -1, + }, + }, + firstArg: firstArg, + argNum: argNum, + hasIndex: false, + index: 0, + indexPos: 0, + indexPending: false, + nbytes: len("%"), // There's guaranteed to be a percent sign. + } + // There may be flags. + state.parseFlags() + // There may be an index. + if err := state.parseIndex(); err != nil { + return nil, err + } + // There may be a width. + state.parseSize(Width) + // There may be a precision. + if err := state.parsePrecision(); err != nil { + return nil, err + } + // Now a verb, possibly prefixed by an index (which we may already have). 
+ if !state.indexPending { + if err := state.parseIndex(); err != nil { + return nil, err + } + } + if state.nbytes == len(state.operation.Text) { + return nil, fmt.Errorf("format %s is missing verb at end of string", state.operation.Text) + } + verb, w := utf8.DecodeRuneInString(state.operation.Text[state.nbytes:]) + + // Ensure there must be a verb. + if state.indexPending { + state.operation.Verb = Verb{ + Verb: verb, + Range: Range{ + Start: state.indexPos, + End: state.nbytes + w, + }, + Index: state.index, + ArgIndex: state.argNum, + } + } else { + state.operation.Verb = Verb{ + Verb: verb, + Range: Range{ + Start: state.nbytes, + End: state.nbytes + w, + }, + Index: -1, + ArgIndex: state.argNum, + } + } + + state.nbytes += w + state.operation.Text = state.operation.Text[:state.nbytes] + return state, nil +} + +// addOffset adjusts the recorded positions in Verb, Width, Prec, and the +// operation's overall Range to be relative to the position in the full format string. +func (s *Operation) addOffset(parsedLen int) { + s.Verb.Range.Start += parsedLen + s.Verb.Range.End += parsedLen + + s.Range.Start = parsedLen + s.Range.End = s.Verb.Range.End + + // one of Fixed or Dynamic is non-negative means existence. + if s.Prec.Fixed != -1 || s.Prec.Dynamic != -1 { + s.Prec.Range.Start += parsedLen + s.Prec.Range.End += parsedLen + } + if s.Width.Fixed != -1 || s.Width.Dynamic != -1 { + s.Width.Range.Start += parsedLen + s.Width.Range.End += parsedLen + } +} + +// parseFlags accepts any printf flags. +func (s *state) parseFlags() { + s.operation.Flags = prefixOf(s.operation.Text[s.nbytes:], "#0+- ") + s.nbytes += len(s.operation.Flags) +} + +// prefixOf returns the prefix of s composed only of runes from the specified set. +func prefixOf(s, set string) string { + rest := strings.TrimLeft(s, set) + return s[:len(s)-len(rest)] +} + +// parseIndex parses an argument index of the form "[n]" that can appear +// in a printf operation (e.g., "%[2]d"). Returns an error if syntax is +// malformed or index is invalid. +func (s *state) parseIndex() error { + if s.nbytes == len(s.operation.Text) || s.operation.Text[s.nbytes] != '[' { + return nil + } + // Argument index present. + s.nbytes++ // skip '[' + start := s.nbytes + if num, ok := s.scanNum(); ok { + // Later consumed/stored by a '*' or verb. + s.index = num + s.indexPos = start - 1 + } + + ok := true + if s.nbytes == len(s.operation.Text) || s.nbytes == start || s.operation.Text[s.nbytes] != ']' { + ok = false // syntax error is either missing "]" or invalid index. + s.nbytes = strings.Index(s.operation.Text[start:], "]") + if s.nbytes < 0 { + return fmt.Errorf("format %s is missing closing ]", s.operation.Text) + } + s.nbytes = s.nbytes + start + } + arg32, err := strconv.ParseInt(s.operation.Text[start:s.nbytes], 10, 32) + if err != nil || !ok || arg32 <= 0 { + return fmt.Errorf("format has invalid argument index [%s]", s.operation.Text[start:s.nbytes]) + } + + s.nbytes++ // skip ']' + arg := int(arg32) + arg += s.firstArg - 1 // We want to zero-index the actual arguments. + s.argNum = arg + s.hasIndex = true + s.indexPending = true + return nil +} + +// scanNum advances through a decimal number if present, which represents a [Size] or [Index]. 
+func (s *state) scanNum() (int, bool) { + start := s.nbytes + for ; s.nbytes < len(s.operation.Text); s.nbytes++ { + c := s.operation.Text[s.nbytes] + if c < '0' || '9' < c { + if start < s.nbytes { + num, _ := strconv.ParseInt(s.operation.Text[start:s.nbytes], 10, 32) + return int(num), true + } else { + return 0, false + } + } + } + return 0, false +} + +type sizeType int + +const ( + Width sizeType = iota + Precision +) + +// parseSize parses a width or precision specifier. It handles literal numeric +// values (e.g., "%3d"), asterisk values (e.g., "%*d"), or indexed asterisk values (e.g., "%[2]*d"). +func (s *state) parseSize(kind sizeType) { + if s.nbytes < len(s.operation.Text) && s.operation.Text[s.nbytes] == '*' { + s.nbytes++ + if s.indexPending { + // Absorb it. + s.indexPending = false + size := Size{ + Fixed: -1, + Dynamic: s.argNum, + Index: s.index, + Range: Range{ + Start: s.indexPos, + End: s.nbytes, + }, + } + switch kind { + case Width: + s.operation.Width = size + case Precision: + // Include the leading '.'. + size.Range.Start -= len(".") + s.operation.Prec = size + default: + panic(kind) + } + } else { + // Non-indexed asterisk: "%*d". + size := Size{ + Dynamic: s.argNum, + Index: -1, + Fixed: -1, + Range: Range{ + Start: s.nbytes - 1, + End: s.nbytes, + }, + } + switch kind { + case Width: + s.operation.Width = size + case Precision: + // For precision, include the '.' in the range. + size.Range.Start -= 1 + s.operation.Prec = size + default: + panic(kind) + } + } + s.argNum++ + } else { // Literal number, e.g. "%10d" + start := s.nbytes + if num, ok := s.scanNum(); ok { + size := Size{ + Fixed: num, + Index: -1, + Dynamic: -1, + Range: Range{ + Start: start, + End: s.nbytes, + }, + } + switch kind { + case Width: + s.operation.Width = size + case Precision: + // Include the leading '.'. + size.Range.Start -= 1 + s.operation.Prec = size + default: + panic(kind) + } + } + } +} + +// parsePrecision checks if there's a precision specified after a '.' character. +// If found, it may also parse an index or an asterisk. Returns an error if any index +// parsing fails. +func (s *state) parsePrecision() error { + // If there's a period, there may be a precision. + if s.nbytes < len(s.operation.Text) && s.operation.Text[s.nbytes] == '.' 
{ + s.nbytes++ + if err := s.parseIndex(); err != nil { + return err + } + s.parseSize(Precision) + } + return nil +} diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go index 69b1d697cbe..12943927159 100644 --- a/internal/gcimporter/iimport.go +++ b/internal/gcimporter/iimport.go @@ -671,7 +671,9 @@ func (r *importReader) obj(name string) { case varTag: typ := r.typ() - r.declare(types.NewVar(pos, r.currPkg, name, typ)) + v := types.NewVar(pos, r.currPkg, name, typ) + typesinternal.SetVarKind(v, typesinternal.PackageVar) + r.declare(v) default: errorf("unexpected tag: %v", tag) diff --git a/internal/gcimporter/ureader_yes.go b/internal/gcimporter/ureader_yes.go index 6cdab448eca..522287d18d6 100644 --- a/internal/gcimporter/ureader_yes.go +++ b/internal/gcimporter/ureader_yes.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/pkgbits" + "golang.org/x/tools/internal/typesinternal" ) // A pkgReader holds the shared state for reading a unified IR package @@ -572,6 +573,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { sig := fn.Type().(*types.Signature) recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named) + typesinternal.SetVarKind(recv, typesinternal.RecvVar) methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic())) } @@ -619,7 +621,9 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { case pkgbits.ObjVar: pos := r.pos() typ := r.typ() - declare(types.NewVar(pos, objPkg, objName, typ)) + v := types.NewVar(pos, objPkg, objName, typ) + typesinternal.SetVarKind(v, typesinternal.PackageVar) + declare(v) } } diff --git a/internal/gocommand/invoke.go b/internal/gocommand/invoke.go index e333efc87f9..7ea9013447b 100644 --- a/internal/gocommand/invoke.go +++ b/internal/gocommand/invoke.go @@ -28,7 +28,7 @@ import ( "golang.org/x/tools/internal/event/label" ) -// An Runner will run go command invocations and serialize +// A Runner will run go command invocations and serialize // them if it sees a concurrency error. type Runner struct { // once guards the runner initialization. @@ -179,7 +179,7 @@ type Invocation struct { CleanEnv bool Env []string WorkingDir string - Logf func(format string, args ...interface{}) + Logf func(format string, args ...any) } // Postcondition: both error results have same nilness. @@ -388,7 +388,9 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) { case err := <-resChan: return err case <-timer.C: - HandleHangingGoCommand(startTime, cmd) + // HandleHangingGoCommand terminates this process. + // Pass off resChan in case we can collect the command error. + handleHangingGoCommand(startTime, cmd, resChan) case <-ctx.Done(): } } else { @@ -413,8 +415,6 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) { } // Didn't shut down in response to interrupt. Kill it hard. - // TODO(rfindley): per advice from bcmills@, it may be better to send SIGQUIT - // on certain platforms, such as unix. if err := cmd.Process.Kill(); err != nil && !errors.Is(err, os.ErrProcessDone) && debug { log.Printf("error killing the Go command: %v", err) } @@ -422,15 +422,17 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) { return <-resChan } -func HandleHangingGoCommand(start time.Time, cmd *exec.Cmd) { +// handleHangingGoCommand outputs debugging information to help diagnose the +// cause of a hanging Go command, and then exits with log.Fatalf. 
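// Editor's note: a self-contained sketch (not part of this patch) of the
// technique handleHangingGoCommand uses below: after a deadline, send the
// platform's "stuck process" signal (SIGQUIT on Unix, which makes a Go child
// dump its goroutine stacks; other programs typically just exit), then collect
// whatever the child wrote to stderr. The command and timeouts are invented,
// and the sketch is Unix-only.
package main

import (
	"bytes"
	"log"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	cmd := exec.Command("sleep", "60") // stand-in for a hanging subprocess
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	select {
	case err := <-done:
		log.Printf("finished: %v", err)
	case <-time.After(2 * time.Second):
		// Ask the child to dump state rather than killing it outright.
		if err := cmd.Process.Signal(syscall.SIGQUIT); err != nil {
			log.Printf("signal: %v", err)
		}
		select {
		case err := <-done:
			log.Printf("exited after SIGQUIT: %v\nstderr:\n%s", err, stderr.String())
		case <-time.After(5 * time.Second):
			_ = cmd.Process.Kill() // still stuck; give up
		}
	}
}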
+func handleHangingGoCommand(start time.Time, cmd *exec.Cmd, resChan chan error) { switch runtime.GOOS { - case "linux", "darwin", "freebsd", "netbsd": + case "linux", "darwin", "freebsd", "netbsd", "openbsd": fmt.Fprintln(os.Stderr, `DETECTED A HANGING GO COMMAND -The gopls test runner has detected a hanging go command. In order to debug -this, the output of ps and lsof/fstat is printed below. + The gopls test runner has detected a hanging go command. In order to debug + this, the output of ps and lsof/fstat is printed below. -See golang/go#54461 for more details.`) + See golang/go#54461 for more details.`) fmt.Fprintln(os.Stderr, "\nps axo ppid,pid,command:") fmt.Fprintln(os.Stderr, "-------------------------") @@ -438,7 +440,7 @@ See golang/go#54461 for more details.`) psCmd.Stdout = os.Stderr psCmd.Stderr = os.Stderr if err := psCmd.Run(); err != nil { - panic(fmt.Sprintf("running ps: %v", err)) + log.Printf("Handling hanging Go command: running ps: %v", err) } listFiles := "lsof" @@ -452,10 +454,24 @@ See golang/go#54461 for more details.`) listFilesCmd.Stdout = os.Stderr listFilesCmd.Stderr = os.Stderr if err := listFilesCmd.Run(); err != nil { - panic(fmt.Sprintf("running %s: %v", listFiles, err)) + log.Printf("Handling hanging Go command: running %s: %v", listFiles, err) + } + // Try to extract information about the slow go process by issuing a SIGQUIT. + if err := cmd.Process.Signal(sigStuckProcess); err == nil { + select { + case err := <-resChan: + stderr := "not a bytes.Buffer" + if buf, _ := cmd.Stderr.(*bytes.Buffer); buf != nil { + stderr = buf.String() + } + log.Printf("Quit hanging go command:\n\terr:%v\n\tstderr:\n%v\n\n", err, stderr) + case <-time.After(5 * time.Second): + } + } else { + log.Printf("Sending signal %d to hanging go command: %v", sigStuckProcess, err) } } - panic(fmt.Sprintf("detected hanging go command (golang/go#54461); waited %s\n\tcommand:%s\n\tpid:%d", time.Since(start), cmd, cmd.Process.Pid)) + log.Fatalf("detected hanging go command (golang/go#54461); waited %s\n\tcommand:%s\n\tpid:%d", time.Since(start), cmd, cmd.Process.Pid) } func cmdDebugStr(cmd *exec.Cmd) string { diff --git a/internal/gocommand/invoke_notunix.go b/internal/gocommand/invoke_notunix.go new file mode 100644 index 00000000000..469c648e4d8 --- /dev/null +++ b/internal/gocommand/invoke_notunix.go @@ -0,0 +1,13 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !unix + +package gocommand + +import "os" + +// sigStuckProcess is the signal to send to kill a hanging subprocess. +// On Unix we send SIGQUIT, but on non-Unix we only have os.Kill. +var sigStuckProcess = os.Kill diff --git a/internal/gocommand/invoke_unix.go b/internal/gocommand/invoke_unix.go new file mode 100644 index 00000000000..169d37c8e93 --- /dev/null +++ b/internal/gocommand/invoke_unix.go @@ -0,0 +1,13 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build unix + +package gocommand + +import "syscall" + +// Sigstuckprocess is the signal to send to kill a hanging subprocess. +// Send SIGQUIT to get a stack trace. 
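The comment above ("Send SIGQUIT to get a stack trace") relies on the Go runtime's default SIGQUIT handling: a Go process that has not installed its own handler dumps every goroutine's stack to stderr and exits. A minimal, unix-only illustration of that dump, not part of the change:

//go:build unix

package main

import (
	"syscall"
	"time"
)

func main() {
	// An extra goroutine, just so the dump shows more than main.
	go func() { time.Sleep(time.Minute) }()

	// Deliver SIGQUIT to ourselves; the runtime prints all goroutine
	// stacks to stderr and exits with status 2.
	syscall.Kill(syscall.Getpid(), syscall.SIGQUIT)
	time.Sleep(time.Second) // give the runtime time to finish the dump
}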
+var sigStuckProcess = syscall.SIGQUIT diff --git a/internal/imports/fix.go b/internal/imports/fix.go index 5ae576977a2..bf6b0aaddde 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -780,7 +780,7 @@ func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix return true }, dirFound: func(pkg *pkg) bool { - if !canUse(filename, pkg.dir) { + if !CanUse(filename, pkg.dir) { return false } // Try the assumed package name first, then a simpler path match @@ -815,7 +815,7 @@ func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, return true }, dirFound: func(pkg *pkg) bool { - if !canUse(filename, pkg.dir) { + if !CanUse(filename, pkg.dir) { return false } return strings.HasPrefix(pkg.importPathShort, searchPrefix) @@ -927,7 +927,7 @@ type ProcessEnv struct { WorkingDir string // If Logf is non-nil, debug logging is enabled through this function. - Logf func(format string, args ...interface{}) + Logf func(format string, args ...any) // If set, ModCache holds a shared cache of directory info to use across // multiple ProcessEnvs. @@ -1132,6 +1132,9 @@ func addStdlibCandidates(pass *pass, refs References) error { // but we have no way of figuring out what the user is using // TODO: investigate using the toolchain version to disambiguate in the stdlib add("math/rand/v2") + // math/rand has an overlapping API + // TestIssue66407 fails without this + add("math/rand") continue } for importPath := range stdlib.PackageSymbols { @@ -1736,7 +1739,7 @@ func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols m // searching for "client.New") func pkgIsCandidate(filename string, refs References, pkg *pkg) bool { // Check "internal" and "vendor" visibility: - if !canUse(filename, pkg.dir) { + if !CanUse(filename, pkg.dir) { return false } @@ -1759,9 +1762,9 @@ func pkgIsCandidate(filename string, refs References, pkg *pkg) bool { return false } -// canUse reports whether the package in dir is usable from filename, +// CanUse reports whether the package in dir is usable from filename, // respecting the Go "internal" and "vendor" visibility rules. -func canUse(filename, dir string) bool { +func CanUse(filename, dir string) bool { // Fast path check, before any allocations. If it doesn't contain vendor // or internal, it's not tricky: // Note that this can false-negative on directories like "notinternal", diff --git a/internal/imports/source_env.go b/internal/imports/source_env.go index d14abaa3195..ec996c3ccf6 100644 --- a/internal/imports/source_env.go +++ b/internal/imports/source_env.go @@ -67,7 +67,7 @@ func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename strin // same package name. Don't try to import ourselves. return false } - if !canUse(filename, pkg.dir) { + if !CanUse(filename, pkg.dir) { return false } mu.Lock() diff --git a/internal/jsonrpc2/messages.go b/internal/jsonrpc2/messages.go index 721168fd4f2..e87d772f398 100644 --- a/internal/jsonrpc2/messages.go +++ b/internal/jsonrpc2/messages.go @@ -27,7 +27,7 @@ type Request interface { Message // Method is a string containing the method name to invoke. Method() string - // Params is an JSON value (object, array, null, or "") with the parameters of the method. + // Params is a JSON value (object, array, null, or "") with the parameters of the method. Params() json.RawMessage // isJSONRPC2Request is used to make the set of request implementations closed. 
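Further down, lookup.go gains a uniquify helper; it is the usual clone-sort-compact idiom from the slices package. The same pattern in isolation, as a standalone sketch:

package main

import (
	"fmt"
	"slices"
)

// dedupe returns the distinct elements of in, sorted, without mutating
// the caller's slice: clone first, then sort, then drop adjacent duplicates.
func dedupe(in []string) []string {
	if len(in) == 0 {
		return in
	}
	out := slices.Clone(in)
	slices.Sort(out)
	return slices.Compact(out)
}

func main() {
	fmt.Println(dedupe([]string{"C", "A", "B", "A", "C"})) // [A B C]
}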
isJSONRPC2Request() diff --git a/internal/jsonrpc2/serve.go b/internal/jsonrpc2/serve.go index cfbcbcb021c..76df52cd43b 100644 --- a/internal/jsonrpc2/serve.go +++ b/internal/jsonrpc2/serve.go @@ -46,7 +46,7 @@ func HandlerServer(h Handler) StreamServer { }) } -// ListenAndServe starts an jsonrpc2 server on the given address. If +// ListenAndServe starts a jsonrpc2 server on the given address. If // idleTimeout is non-zero, ListenAndServe exits after there are no clients for // this duration, otherwise it exits only on error. func ListenAndServe(ctx context.Context, network, addr string, server StreamServer, idleTimeout time.Duration) error { diff --git a/internal/modindex/lookup.go b/internal/modindex/lookup.go index 012fdd7134c..5499c5c67f3 100644 --- a/internal/modindex/lookup.go +++ b/internal/modindex/lookup.go @@ -35,6 +35,36 @@ const ( Func ) +// LookupAll only returns those Candidates whose import path +// finds all the nms. +func (ix *Index) LookupAll(pkg string, names ...string) map[string][]Candidate { + // this can be made faster when benchmarks show that it needs to be + names = uniquify(names) + byImpPath := make(map[string][]Candidate) + for _, nm := range names { + cands := ix.Lookup(pkg, nm, false) + for _, c := range cands { + byImpPath[c.ImportPath] = append(byImpPath[c.ImportPath], c) + } + } + for k, v := range byImpPath { + if len(v) != len(names) { + delete(byImpPath, k) + } + } + return byImpPath +} + +// remove duplicates +func uniquify(in []string) []string { + if len(in) == 0 { + return in + } + in = slices.Clone(in) + slices.Sort(in) + return slices.Compact(in) +} + // Lookup finds all the symbols in the index with the given PkgName and name. // If prefix is true, it finds all of these with name as a prefix. func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate { diff --git a/internal/modindex/lookup_test.go b/internal/modindex/lookup_test.go index 4c5ae35695d..191395cffc9 100644 --- a/internal/modindex/lookup_test.go +++ b/internal/modindex/lookup_test.go @@ -68,7 +68,6 @@ func okresult(r result, p Candidate) bool { } func TestLookup(t *testing.T) { - log.SetFlags(log.Lshortfile) dir := testModCache(t) wrtData(t, dir, thedata) if _, err := indexModCache(dir, true); err != nil { @@ -133,3 +132,67 @@ func wrtData(t *testing.T, dir string, data tdata) { fd.WriteString(item.code + "\n") } } + +func TestLookupAll(t *testing.T) { + log.SetFlags(log.Lshortfile) + dir := testModCache(t) + wrtModule := func(mod string, nms ...string) { + dname := filepath.Join(dir, mod) + if err := os.MkdirAll(dname, 0755); err != nil { + t.Fatal(err) + } + fname := filepath.Join(dname, "foo.go") + fd, err := os.Create(fname) + if err != nil { + t.Fatal(err) + } + defer fd.Close() + if _, err := fd.WriteString(fmt.Sprintf("package foo\n")); err != nil { + t.Fatal(err) + } + for _, nm := range nms { + fd.WriteString(fmt.Sprintf("func %s() {}\n", nm)) + } + } + wrtModule("a.com/go/x4@v1.1.1", "A", "B", "C", "D") + wrtModule("b.com/go/x3@v1.2.1", "A", "B", "C") + wrtModule("c.com/go/x5@v1.3.1", "A", "B", "C", "D", "E") + + if _, err := indexModCache(dir, true); err != nil { + t.Fatal(err) + } + ix, err := ReadIndex(dir) + if err != nil { + t.Fatal(err) + } + cands := ix.Lookup("foo", "A", false) + if len(cands) != 3 { + t.Errorf("got %d candidates for A, expected 3", len(cands)) + } + got := ix.LookupAll("foo", "A", "B", "C", "D") + if len(got) != 2 { + t.Errorf("got %d candidates for A,B,C,D, expected 2", len(got)) + } + got = ix.LookupAll("foo", []string{"A", "B", "C", "D", 
"E"}...) + if len(got) != 1 { + t.Errorf("got %d candidates for A,B,C,D,E, expected 1", len(got)) + } +} + +func TestUniquify(t *testing.T) { + var v []string + for i := 1; i < 4; i++ { + v = append(v, "A") + w := uniquify(v) + if len(w) != 1 { + t.Errorf("got %d, expected 1", len(w)) + } + } + for i := 1; i < 3; i++ { + v = append(v, "B", "C") + w := uniquify(v) + if len(w) != 3 { + t.Errorf("got %d, expected 3", len(w)) + } + } +} diff --git a/internal/modindex/symbols.go b/internal/modindex/symbols.go index 33bf2641f7b..b918529d43e 100644 --- a/internal/modindex/symbols.go +++ b/internal/modindex/symbols.go @@ -12,6 +12,7 @@ import ( "go/types" "os" "path/filepath" + "runtime" "slices" "strings" @@ -29,14 +30,14 @@ import ( type symbol struct { pkg string // name of the symbols's package name string // declared name - kind string // T, C, V, or F + kind string // T, C, V, or F, follwed by D if deprecated sig string // signature information, for F } // find the symbols for the best directories func getSymbols(cd Abspath, dirs map[string][]*directory) { var g errgroup.Group - g.SetLimit(-1) // maybe throttle this some day + g.SetLimit(max(2, runtime.GOMAXPROCS(0)/2)) for _, vv := range dirs { // throttling some day? d := vv[0] @@ -111,7 +112,7 @@ func getFileExports(f *ast.File) []symbol { // print struct tags. So for this to happen the type of a formal parameter // has to be a explict struct, e.g. foo(x struct{a int "$"}) and ExprString // would have to show the struct tag. Even testing for this case seems - // a waste of effort, but let's not ignore such pathologies + // a waste of effort, but let's remember the possibility if strings.Contains(tp, "$") { continue } diff --git a/internal/packagesinternal/packages.go b/internal/packagesinternal/packages.go index 66e69b4389d..784605914e0 100644 --- a/internal/packagesinternal/packages.go +++ b/internal/packagesinternal/packages.go @@ -5,7 +5,7 @@ // Package packagesinternal exposes internal-only fields from go/packages. package packagesinternal -var GetDepsErrors = func(p interface{}) []*PackageError { return nil } +var GetDepsErrors = func(p any) []*PackageError { return nil } type PackageError struct { ImportStack []string // shortest path from package named on command line to this one @@ -16,5 +16,5 @@ type PackageError struct { var TypecheckCgo int var DepsErrors int // must be set as a LoadMode to call GetDepsErrors -var SetModFlag = func(config interface{}, value string) {} +var SetModFlag = func(config any, value string) {} var SetModFile = func(config interface{}, value string) {} diff --git a/internal/packagestest/expect.go b/internal/packagestest/expect.go index 053d8e8a9db..e3e3509579d 100644 --- a/internal/packagestest/expect.go +++ b/internal/packagestest/expect.go @@ -411,7 +411,7 @@ func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (Range, [] eof := tokFile.Pos(tokFile.Size()) return newRange(tokFile, eof, eof), args, nil default: - // look up an marker by name + // look up a marker by name mark, ok := e.markers[string(arg)] if !ok { return Range{}, nil, fmt.Errorf("cannot find marker %v", arg) diff --git a/internal/refactor/inline/analyzer/analyzer.go b/internal/refactor/inline/analyzer/analyzer.go deleted file mode 100644 index 0e3fec82f95..00000000000 --- a/internal/refactor/inline/analyzer/analyzer.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package analyzer - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "slices" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/diff" - "golang.org/x/tools/internal/refactor/inline" -) - -const Doc = `inline calls to functions with "//go:fix inline" doc comment` - -var Analyzer = &analysis.Analyzer{ - Name: "inline", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/internal/refactor/inline/analyzer", - Run: run, - FactTypes: []analysis.Fact{new(goFixInlineFact)}, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (any, error) { - // Memoize repeated calls for same file. - fileContent := make(map[string][]byte) - readFile := func(node ast.Node) ([]byte, error) { - filename := pass.Fset.File(node.Pos()).Name() - content, ok := fileContent[filename] - if !ok { - var err error - content, err = pass.ReadFile(filename) - if err != nil { - return nil, err - } - fileContent[filename] = content - } - return content, nil - } - - // Pass 1: find functions annotated with a "//go:fix inline" - // comment (the syntax proposed by #32816), - // and export a fact for each one. - inlinable := make(map[*types.Func]*inline.Callee) // memoization of fact import (nil => no fact) - for _, file := range pass.Files { - for _, decl := range file.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok && - slices.ContainsFunc(directives(decl.Doc), func(d *directive) bool { - return d.Tool == "go" && d.Name == "fix" && d.Args == "inline" - }) { - - content, err := readFile(decl) - if err != nil { - pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) - continue - } - callee, err := inline.AnalyzeCallee(discard, pass.Fset, pass.Pkg, pass.TypesInfo, decl, content) - if err != nil { - pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) - continue - } - fn := pass.TypesInfo.Defs[decl.Name].(*types.Func) - pass.ExportObjectFact(fn, &goFixInlineFact{callee}) - inlinable[fn] = callee - } - } - } - - // Pass 2. Inline each static call to an inlinable function. - // - // TODO(adonovan): handle multiple diffs that each add the same import. - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.File)(nil), - (*ast.CallExpr)(nil), - } - var currentFile *ast.File - inspect.Preorder(nodeFilter, func(n ast.Node) { - if file, ok := n.(*ast.File); ok { - currentFile = file - return - } - call := n.(*ast.CallExpr) - if fn := typeutil.StaticCallee(pass.TypesInfo, call); fn != nil { - // Inlinable? - callee, ok := inlinable[fn] - if !ok { - var fact goFixInlineFact - if pass.ImportObjectFact(fn, &fact) { - callee = fact.Callee - inlinable[fn] = callee - } - } - if callee == nil { - return // nope - } - - // Inline the call. - content, err := readFile(call) - if err != nil { - pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) - return - } - caller := &inline.Caller{ - Fset: pass.Fset, - Types: pass.Pkg, - Info: pass.TypesInfo, - File: currentFile, - Call: call, - Content: content, - } - res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard}) - if err != nil { - pass.Reportf(call.Lparen, "%v", err) - return - } - if res.Literalized { - // Users are not fond of inlinings that literalize - // f(x) to func() { ... }(), so avoid them. 
- // - // (Unfortunately the inliner is very timid, - // and often literalizes when it cannot prove that - // reducing the call is safe; the user of this tool - // has no indication of what the problem is.) - return - } - got := res.Content - - // Suggest the "fix". - var textEdits []analysis.TextEdit - for _, edit := range diff.Bytes(content, got) { - textEdits = append(textEdits, analysis.TextEdit{ - Pos: currentFile.FileStart + token.Pos(edit.Start), - End: currentFile.FileStart + token.Pos(edit.End), - NewText: []byte(edit.New), - }) - } - msg := fmt.Sprintf("inline call of %v", callee) - pass.Report(analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: msg, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: msg, - TextEdits: textEdits, - }}, - }) - } - }) - - return nil, nil -} - -// A goFixInlineFact is exported for each function marked "//go:fix inline". -// It holds information about the callee to support inlining. -type goFixInlineFact struct{ Callee *inline.Callee } - -func (f *goFixInlineFact) String() string { return "goFixInline " + f.Callee.String() } -func (*goFixInlineFact) AFact() {} - -func discard(string, ...any) {} diff --git a/internal/refactor/inline/analyzer/testdata/src/a/a.go b/internal/refactor/inline/analyzer/testdata/src/a/a.go deleted file mode 100644 index 6e159a36894..00000000000 --- a/internal/refactor/inline/analyzer/testdata/src/a/a.go +++ /dev/null @@ -1,17 +0,0 @@ -package a - -func f() { - One() // want `inline call of a.One` - - new(T).Two() // want `inline call of \(a.T\).Two` -} - -type T struct{} - -//go:fix inline -func One() int { return one } // want One:`goFixInline a.One` - -const one = 1 - -//go:fix inline -func (T) Two() int { return 2 } // want Two:`goFixInline \(a.T\).Two` diff --git a/internal/refactor/inline/analyzer/testdata/src/a/a.go.golden b/internal/refactor/inline/analyzer/testdata/src/a/a.go.golden deleted file mode 100644 index ea94f3b0175..00000000000 --- a/internal/refactor/inline/analyzer/testdata/src/a/a.go.golden +++ /dev/null @@ -1,17 +0,0 @@ -package a - -func f() { - _ = one // want `inline call of a.One` - - _ = 2 // want `inline call of \(a.T\).Two` -} - -type T struct{} - -//go:fix inline -func One() int { return one } // want One:`goFixInline a.One` - -const one = 1 - -//go:fix inline -func (T) Two() int { return 2 } // want Two:`goFixInline \(a.T\).Two` diff --git a/internal/refactor/inline/analyzer/testdata/src/b/b.go b/internal/refactor/inline/analyzer/testdata/src/b/b.go deleted file mode 100644 index 069e670d51e..00000000000 --- a/internal/refactor/inline/analyzer/testdata/src/b/b.go +++ /dev/null @@ -1,9 +0,0 @@ -package b - -import "a" - -func f() { - a.One() // want `cannot inline call to a.One because body refers to non-exported one` - - new(a.T).Two() // want `inline call of \(a.T\).Two` -} diff --git a/internal/refactor/inline/analyzer/testdata/src/b/b.go.golden b/internal/refactor/inline/analyzer/testdata/src/b/b.go.golden deleted file mode 100644 index b871b4b5100..00000000000 --- a/internal/refactor/inline/analyzer/testdata/src/b/b.go.golden +++ /dev/null @@ -1,9 +0,0 @@ -package b - -import "a" - -func f() { - a.One() // want `cannot inline call to a.One because body refers to non-exported one` - - _ = 2 // want `inline call of \(a.T\).Two` -} diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index c981599b5b0..2c897c24954 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -25,6 +25,7 @@ import ( 
"golang.org/x/tools/imports" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) // A Caller describes the function call and its enclosing context. @@ -1004,53 +1005,57 @@ func (st *state) inlineCall() (*inlineCallResult, error) { logf("strategy: reduce call to empty body") // Evaluate the arguments for effects and delete the call entirely. - stmt := callStmt(caller.path, false) // cannot fail - res.old = stmt - if nargs := len(remainingArgs); nargs > 0 { - // Emit "_, _ = args" to discard results. - - // TODO(adonovan): if args is the []T{a1, ..., an} - // literal synthesized during variadic simplification, - // consider unwrapping it to its (pure) elements. - // Perhaps there's no harm doing this for any slice literal. - - // Make correction for spread calls - // f(g()) or recv.f(g()) where g() is a tuple. - if last := last(args); last != nil && last.spread { - nspread := last.typ.(*types.Tuple).Len() - if len(args) > 1 { // [recv, g()] - // A single AssignStmt cannot discard both, so use a 2-spec var decl. - res.new = &ast.GenDecl{ - Tok: token.VAR, - Specs: []ast.Spec{ - &ast.ValueSpec{ - Names: []*ast.Ident{makeIdent("_")}, - Values: []ast.Expr{args[0].expr}, - }, - &ast.ValueSpec{ - Names: blanks[*ast.Ident](nspread), - Values: []ast.Expr{args[1].expr}, + // Note(golang/go#71486): stmt can be nil if the call is in a go or defer + // statement. + // TODO: discard go or defer statements as well. + if stmt := callStmt(caller.path, false); stmt != nil { + res.old = stmt + if nargs := len(remainingArgs); nargs > 0 { + // Emit "_, _ = args" to discard results. + + // TODO(adonovan): if args is the []T{a1, ..., an} + // literal synthesized during variadic simplification, + // consider unwrapping it to its (pure) elements. + // Perhaps there's no harm doing this for any slice literal. + + // Make correction for spread calls + // f(g()) or recv.f(g()) where g() is a tuple. + if last := last(args); last != nil && last.spread { + nspread := last.typ.(*types.Tuple).Len() + if len(args) > 1 { // [recv, g()] + // A single AssignStmt cannot discard both, so use a 2-spec var decl. + res.new = &ast.GenDecl{ + Tok: token.VAR, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{makeIdent("_")}, + Values: []ast.Expr{args[0].expr}, + }, + &ast.ValueSpec{ + Names: blanks[*ast.Ident](nspread), + Values: []ast.Expr{args[1].expr}, + }, }, - }, + } + return res, nil } - return res, nil + + // Sole argument is spread call. + nargs = nspread } - // Sole argument is spread call. 
- nargs = nspread - } + res.new = &ast.AssignStmt{ + Lhs: blanks[ast.Expr](nargs), + Tok: token.ASSIGN, + Rhs: remainingArgs, + } - res.new = &ast.AssignStmt{ - Lhs: blanks[ast.Expr](nargs), - Tok: token.ASSIGN, - Rhs: remainingArgs, + } else { + // No remaining arguments: delete call statement entirely + res.new = &ast.EmptyStmt{} } - - } else { - // No remaining arguments: delete call statement entirely - res.new = &ast.EmptyStmt{} + return res, nil } - return res, nil } // If all parameters have been substituted and no result @@ -1946,7 +1951,9 @@ func checkFalconConstraints(logf logger, params []*parameter, args []*argument, logf("falcon env: const %s %s = %v", name, param.info.FalconType, arg.constant) nconst++ } else { - pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, arg.typ)) + v := types.NewVar(token.NoPos, pkg, name, arg.typ) + typesinternal.SetVarKind(v, typesinternal.PackageVar) + pkg.Scope().Insert(v) logf("falcon env: var %s %s", name, arg.typ) } } diff --git a/internal/refactor/inline/testdata/empty-body.txtar b/internal/refactor/inline/testdata/empty-body.txtar index 8983fda8c6e..fa0689a2125 100644 --- a/internal/refactor/inline/testdata/empty-body.txtar +++ b/internal/refactor/inline/testdata/empty-body.txtar @@ -101,3 +101,20 @@ func _() { var x T _ = x //@ inline(re"empty", empty4) } + +-- a/a5.go -- +package a + +func _() { + go empty() //@ inline(re"empty", empty5) +} + +func empty() {} +-- empty5 -- +package a + +func _() { + go func() {}() //@ inline(re"empty", empty5) +} + +func empty() {} diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go index d217e28462c..144f4f8fd64 100644 --- a/internal/testenv/testenv.go +++ b/internal/testenv/testenv.go @@ -7,10 +7,12 @@ package testenv import ( + "bufio" "bytes" "context" "fmt" "go/build" + "log" "os" "os/exec" "path/filepath" @@ -553,3 +555,45 @@ func NeedsGOROOTDir(t *testing.T, dir string) { } } } + +// RedirectStderr causes os.Stderr (and the global logger) to be +// temporarily replaced so that writes to it are sent to t.Log. +// It is restored at test cleanup. +func RedirectStderr(t testing.TB) { + t.Setenv("RedirectStderr", "") // side effect: assert t.Parallel wasn't called + + // TODO(adonovan): if https://go.dev/issue/59928 is accepted, + // simply set w = t.Output() and dispense with the pipe. + r, w, err := os.Pipe() + if err != nil { + t.Fatalf("pipe: %v", err) + } + done := make(chan struct{}) + go func() { + for sc := bufio.NewScanner(r); sc.Scan(); { + t.Log(sc.Text()) + } + r.Close() + close(done) + }() + + // Also do the same for the global logger. + savedWriter, savedPrefix, savedFlags := log.Writer(), log.Prefix(), log.Flags() + log.SetPrefix("log: ") + log.SetOutput(w) + log.SetFlags(0) + + oldStderr := os.Stderr + os.Stderr = w + t.Cleanup(func() { + w.Close() // ignore error + os.Stderr = oldStderr + + log.SetOutput(savedWriter) + log.SetPrefix(savedPrefix) + log.SetFlags(savedFlags) + + // Don't let test finish before final t.Log. + <-done + }) +} diff --git a/internal/typeparams/coretype.go b/internal/typeparams/coretype.go index 6e83c6fb1a2..27a2b179299 100644 --- a/internal/typeparams/coretype.go +++ b/internal/typeparams/coretype.go @@ -109,8 +109,13 @@ func CoreType(T types.Type) types.Type { // // NormalTerms makes no guarantees about the order of terms, except that it // is deterministic. 
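The RedirectStderr helper above is built on a common pipe-plus-scanner pattern: everything written to one end of an os.Pipe comes back line by line on the other end. Stripped of the testing and log plumbing, the core looks roughly like this (a sketch, not the helper itself):

package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
)

func main() {
	r, w, err := os.Pipe()
	if err != nil {
		log.Fatal(err)
	}
	done := make(chan struct{})
	go func() {
		defer close(done)
		// Each line written to w is delivered here in order.
		for sc := bufio.NewScanner(r); sc.Scan(); {
			fmt.Println("captured:", sc.Text())
		}
	}()
	fmt.Fprintln(w, "hello from the redirected writer")
	w.Close() // closing the write end terminates the scanner loop
	<-done
	r.Close()
}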
-func NormalTerms(typ types.Type) ([]*types.Term, error) { - switch typ := typ.Underlying().(type) { +func NormalTerms(T types.Type) ([]*types.Term, error) { + // typeSetOf(T) == typeSetOf(Unalias(T)) + typ := types.Unalias(T) + if named, ok := typ.(*types.Named); ok { + typ = named.Underlying() + } + switch typ := typ.(type) { case *types.TypeParam: return StructuralTerms(typ) case *types.Union: @@ -118,7 +123,7 @@ func NormalTerms(typ types.Type) ([]*types.Term, error) { case *types.Interface: return InterfaceTermSet(typ) default: - return []*types.Term{types.NewTerm(false, typ)}, nil + return []*types.Term{types.NewTerm(false, T)}, nil } } diff --git a/internal/typeparams/coretype_test.go b/internal/typeparams/coretype_test.go index a9575f9238e..371d9f8ed31 100644 --- a/internal/typeparams/coretype_test.go +++ b/internal/typeparams/coretype_test.go @@ -95,7 +95,7 @@ func TestCoreType(t *testing.T) { got = ct.String() } if got != test.want { - t.Errorf("coreType(%s) = %v, want %v", test.expr, got, test.want) + t.Errorf("CoreType(%s) = %v, want %v", test.expr, got, test.want) } } } diff --git a/internal/typesinternal/errorcode.go b/internal/typesinternal/errorcode.go index 131caab2847..235a6defc4c 100644 --- a/internal/typesinternal/errorcode.go +++ b/internal/typesinternal/errorcode.go @@ -966,7 +966,7 @@ const ( // var _ = string(x) InvalidConversion - // InvalidUntypedConversion occurs when an there is no valid implicit + // InvalidUntypedConversion occurs when there is no valid implicit // conversion from an untyped value satisfying the type constraints of the // context in which it is used. // diff --git a/internal/typesinternal/recv.go b/internal/typesinternal/recv.go index e54accc69a0..8352ea76173 100644 --- a/internal/typesinternal/recv.go +++ b/internal/typesinternal/recv.go @@ -12,7 +12,8 @@ import ( // type of recv, which may be of the form N or *N, or aliases thereof. // It also reports whether a Pointer was present. // -// The named result may be nil in ill-typed code. +// The named result may be nil if recv is from a method on an +// anonymous interface or struct types or in ill-typed code. func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) { t := recv.Type() if ptr, ok := types.Unalias(t).(*types.Pointer); ok { diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index a93d51f9882..34534879630 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -120,3 +120,8 @@ func Origin(t NamedOrAlias) NamedOrAlias { } return t } + +// IsPackageLevel reports whether obj is a package-level symbol. +func IsPackageLevel(obj types.Object) bool { + return obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() +} diff --git a/internal/typesinternal/varkind.go b/internal/typesinternal/varkind.go new file mode 100644 index 00000000000..e5da0495111 --- /dev/null +++ b/internal/typesinternal/varkind.go @@ -0,0 +1,40 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +// TODO(adonovan): when CL 645115 lands, define the go1.25 version of +// this API that actually does something. 
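IsPackageLevel, added to typesinternal above, just compares an object's enclosing scope with its package scope. A small go/types sketch of the distinction it draws; the helper is re-declared locally here for illustration:

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

// isPackageLevel mirrors typesinternal.IsPackageLevel: a package-level
// object is one whose parent scope is its package's scope.
func isPackageLevel(obj types.Object) bool {
	return obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope()
}

func main() {
	const src = `package p
var V int
func F() { var local int; _ = local }`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Defs: make(map[*ast.Ident]types.Object)}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, info)
	if err != nil {
		panic(err)
	}
	fmt.Println(isPackageLevel(pkg.Scope().Lookup("V"))) // true
	for ident, obj := range info.Defs {
		if obj != nil && ident.Name == "local" {
			fmt.Println(isPackageLevel(obj)) // false: parent is F's body scope
		}
	}
}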
+ +import "go/types" + +type VarKind uint8 + +const ( + _ VarKind = iota // (not meaningful) + PackageVar // a package-level variable + LocalVar // a local variable + RecvVar // a method receiver variable + ParamVar // a function parameter variable + ResultVar // a function result variable + FieldVar // a struct field +) + +func (kind VarKind) String() string { + return [...]string{ + 0: "VarKind(0)", + PackageVar: "PackageVar", + LocalVar: "LocalVar", + RecvVar: "RecvVar", + ParamVar: "ParamVar", + ResultVar: "ResultVar", + FieldVar: "FieldVar", + }[kind] +} + +// GetVarKind returns an invalid VarKind. +func GetVarKind(v *types.Var) VarKind { return 0 } + +// SetVarKind has no effect. +func SetVarKind(v *types.Var, kind VarKind) {} diff --git a/playground/socket/socket.go b/playground/socket/socket.go index 797dcc6dd4c..9e5b4a954d2 100644 --- a/playground/socket/socket.go +++ b/playground/socket/socket.go @@ -5,7 +5,7 @@ //go:build !appengine // +build !appengine -// Package socket implements an WebSocket-based playground backend. +// Package socket implements a WebSocket-based playground backend. // Clients connect to a websocket handler and send run/kill commands, and // the server sends the output and exit status of the running processes. // Multiple clients running multiple processes may be served concurrently. diff --git a/present/args.go b/present/args.go index b4f7503b6da..17b9d4e87e8 100644 --- a/present/args.go +++ b/present/args.go @@ -96,7 +96,7 @@ func addrToByteRange(addr string, start int, data []byte) (lo, hi int, err error j = i } pattern := addr[1:i] - lo, hi, err = addrRegexp(data, lo, hi, dir, pattern) + lo, hi, err = addrRegexp(data, hi, dir, pattern) prevc = c addr = addr[j:] continue @@ -202,7 +202,7 @@ func addrNumber(data []byte, lo, hi int, dir byte, n int, charOffset bool) (int, // addrRegexp searches for pattern in the given direction starting at lo, hi. // The direction dir is '+' (search forward from hi) or '-' (search backward from lo). // Backward searches are unimplemented. -func addrRegexp(data []byte, lo, hi int, dir byte, pattern string) (int, int, error) { +func addrRegexp(data []byte, hi int, dir byte, pattern string) (int, int, error) { // We want ^ and $ to work as in sam/acme, so use ?m. re, err := regexp.Compile("(?m:" + pattern + ")") if err != nil { diff --git a/present/code.go b/present/code.go index f00f1f49d0b..d98f8384414 100644 --- a/present/code.go +++ b/present/code.go @@ -238,8 +238,8 @@ func codeLines(src []byte, start, end int) (lines []codeLine) { return } -func parseArgs(name string, line int, args []string) (res []interface{}, err error) { - res = make([]interface{}, len(args)) +func parseArgs(name string, line int, args []string) (res []any, err error) { + res = make([]any, len(args)) for i, v := range args { if len(v) == 0 { return nil, fmt.Errorf("%s:%d bad code argument %q", name, line, v) diff --git a/present/parse.go b/present/parse.go index 162a382b060..8b41dd2df52 100644 --- a/present/parse.go +++ b/present/parse.go @@ -15,6 +15,7 @@ import ( "net/url" "os" "regexp" + "slices" "strings" "time" "unicode" @@ -166,7 +167,7 @@ type Elem interface { // renderElem implements the elem template function, used to render // sub-templates. 
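The execTemplate helper updated just below follows the standard buffer-then-HTML pattern from html/template: execute a named sub-template into a bytes.Buffer and wrap the result as trusted HTML. Roughly, as a standalone sketch:

package main

import (
	"bytes"
	"fmt"
	"html/template"
)

// execTemplate renders the named sub-template into a buffer and returns
// the result as template.HTML, the same shape as the helper in the
// present package.
func execTemplate(t *template.Template, name string, data any) (template.HTML, error) {
	var b bytes.Buffer
	if err := t.ExecuteTemplate(&b, name, data); err != nil {
		return "", err
	}
	return template.HTML(b.String()), nil
}

func main() {
	t := template.Must(template.New("root").Parse(`{{define "greet"}}<b>Hello, {{.}}!</b>{{end}}`))
	out, err := execTemplate(t, "greet", "gopher")
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // <b>Hello, gopher!</b>
}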
func renderElem(t *template.Template, e Elem) (template.HTML, error) { - var data interface{} = e + var data any = e if s, ok := e.(Section); ok { data = struct { Section @@ -191,7 +192,7 @@ func init() { // execTemplate is a helper to execute a template and return the output as a // template.HTML value. -func execTemplate(t *template.Template, name string, data interface{}) (template.HTML, error) { +func execTemplate(t *template.Template, name string, data any) (template.HTML, error) { b := new(bytes.Buffer) err := t.ExecuteTemplate(b, name, data) if err != nil { @@ -394,7 +395,7 @@ func parseSections(ctx *Context, name, prefix string, lines *Lines, number []int } } section := Section{ - Number: append(append([]int{}, number...), i), + Number: append(slices.Clone(number), i), Title: title, ID: id, } diff --git a/refactor/importgraph/graph.go b/refactor/importgraph/graph.go index d2d8f098b3f..c24ff882c7b 100644 --- a/refactor/importgraph/graph.go +++ b/refactor/importgraph/graph.go @@ -68,7 +68,7 @@ func Build(ctxt *build.Context) (forward, reverse Graph, errors map[string]error err error } - ch := make(chan interface{}) + ch := make(chan any) go func() { sema := make(chan int, 20) // I/O concurrency limiting semaphore diff --git a/refactor/rename/check.go b/refactor/rename/check.go index 8350ad7bc32..7b29dbf6a72 100644 --- a/refactor/rename/check.go +++ b/refactor/rename/check.go @@ -19,7 +19,7 @@ import ( ) // errorf reports an error (e.g. conflict) and prevents file modification. -func (r *renamer) errorf(pos token.Pos, format string, args ...interface{}) { +func (r *renamer) errorf(pos token.Pos, format string, args ...any) { r.hadConflicts = true reportError(r.iprog.Fset.Position(pos), fmt.Sprintf(format, args...)) } @@ -36,7 +36,7 @@ func (r *renamer) check(from types.Object) { r.checkInFileBlock(from_) } else if from_, ok := from.(*types.Label); ok { r.checkLabel(from_) - } else if isPackageLevel(from) { + } else if typesinternal.IsPackageLevel(from) { r.checkInPackageBlock(from) } else if v, ok := from.(*types.Var); ok && v.IsField() { r.checkStructField(v) diff --git a/refactor/rename/util.go b/refactor/rename/util.go index bc6dc10cac9..7c1a634e4ed 100644 --- a/refactor/rename/util.go +++ b/refactor/rename/util.go @@ -61,10 +61,6 @@ func isLocal(obj types.Object) bool { return depth >= 4 } -func isPackageLevel(obj types.Object) bool { - return obj.Pkg().Scope().Lookup(obj.Name()) == obj -} - // -- Plundered from go/scanner: --------------------------------------- func isLetter(ch rune) bool {