From 6fa9024473a2dbf6a89969dc864d21e96da5f44b Mon Sep 17 00:00:00 2001 From: Grant Nelson Date: Wed, 8 Jan 2025 13:24:39 -0700 Subject: [PATCH] Adding Augmentor --- build/augmentor.go | 346 ++++++++++++++++++ build/build.go | 415 +--------------------- build/build_test.go | 7 +- build/context.go | 2 +- compiler/astutil/astutil.go | 308 +++++++++++++++- compiler/astutil/astutil_test.go | 563 +++++++++++++++++++++++++++++- internal/srctesting/srctesting.go | 4 +- 7 files changed, 1225 insertions(+), 420 deletions(-) create mode 100644 build/augmentor.go diff --git a/build/augmentor.go b/build/augmentor.go new file mode 100644 index 000000000..f609b8973 --- /dev/null +++ b/build/augmentor.go @@ -0,0 +1,346 @@ +package build + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "path" + "strconv" + "strings" + + "github.com/gopherjs/gopherjs/compiler/astutil" +) + +// overrideInfo is used by parseAndAugment methods to manage +// directives and how the overlay and original are merged. +type overrideInfo struct { + // KeepOriginal indicates that the original code should be kept + // but the identifier will be prefixed by `_gopherjs_original_foo`. + // If false the original code is removed. + keepOriginal bool + + // purgeMethods indicates that this info is for a type and + // if a method has this type as a receiver should also be removed. + // If the method is defined in the overlays and therefore has its + // own overrides, this will be ignored. + purgeMethods bool + + // overrideSignature is the function definition given in the overlays + // that should be used to replace the signature in the originals. + // Only receivers, type parameters, parameters, and results will be used. + overrideSignature *ast.FuncDecl +} + +// pkgOverrideInfo is the collection of overrides still needed for a package. 
+type pkgOverrideInfo struct { + // overrides is a map of identifier to overrideInfo to override + // individual named structs, interfaces, functions, and methods. + overrides map[string]overrideInfo + + // overlayFiles are the files from the natives that still haven't been + // appended to a file from the package, typically the first file. + overlayFiles []*ast.File + + // jsFiles are the additional JS files that are part of the natives. + jsFiles []JSFile +} + +// Augmentor is an on-the-fly package augmentor. +// +// When a file from a package is being parsed, the Augmentor will augment +// the AST with the changes loaded from the native overrides. +// The augmentor will hold onto the override information for additional files +// that come from the same package. This is designed to be used with +// `x/tools/go/packages.Load` as a middleware in the parse file step via +// `Config.ParseFile`. +// +// The first file from a package will have any additional methods and +// information from the natives injected into the AST. All files from a package +// will be augmented by the overrides. +type Augmentor struct { + // packages is a map of package import path to the package's override. + // This is used to keep track of the overrides for a package and indicate + // that additional files from the natives have already been applied. + packages map[string]*pkgOverrideInfo +} + +func (aug *Augmentor) Augment(xctx XContext, pkg *PackageData, fileSet *token.FileSet, file *ast.File) error { + pkgAug := aug.getPackageOverrides(xctx, pkg, fileSet) + + augmentOriginalImports(pkg.ImportPath, file) + + if len(pkgAug.overrides) > 0 { + augmentOriginalFile(file, pkgAug.overrides) + } + + if len(pkgAug.overlayFiles) > 0 { + // Append the overlay files to the first file of the package. + // This is to ensure that the package is augmented with all the + // additional methods and information from the natives. + err := astutil.ConcatenateFiles(file, pkgAug.overlayFiles...) 
+ if err != nil { + return fmt.Errorf("failed to concatenate overlay files onto %q: %w", fileSet.Position(file.Package).Filename, err) + } + pkgAug.overlayFiles = nil + + // TODO: REMOVE + if file.Name.Name == "sync" { + buf := &bytes.Buffer{} + if err := format.Node(buf, fileSet, file); err != nil { + panic(fmt.Errorf("failed to format augmented file: %w", err)) + } + fmt.Println(">>>>>\n", buf.String(), "\n<<<<<") + fmt.Println(">>>>>") + ast.Print(fileSet, file) + fmt.Println("\n<<<<<") + } + } + + return nil +} + +func (aug *Augmentor) GetJSFiles(pkg *PackageData) []JSFile { + pkgAug, ok := aug.packages[pkg.ImportPath] + if !ok { + return nil + } + return pkgAug.jsFiles +} + +// getPackageOverrides looks up an already loaded package override +// or loads the package's natives, parses the overlay files, and +// stores the overrides for the package in the augmentor for next time. +func (aug *Augmentor) getPackageOverrides(xctx XContext, pkg *PackageData, fileSet *token.FileSet) *pkgOverrideInfo { + importPath := pkg.ImportPath + if pkgAug, ok := aug.packages[importPath]; ok { + return pkgAug + } + + jsFiles, overlayFiles := parseOverlayFiles(xctx, pkg, fileSet) + + overrides := make(map[string]overrideInfo) + for _, file := range overlayFiles { + augmentOverlayFile(file, overrides) + } + delete(overrides, `init`) + + pkgAug := &pkgOverrideInfo{ + overrides: overrides, + overlayFiles: overlayFiles, + jsFiles: jsFiles, + } + + if aug.packages == nil { + aug.packages = map[string]*pkgOverrideInfo{} + } + aug.packages[importPath] = pkgAug + return pkgAug +} + +// parseOverlayFiles loads and parses overlay files +// to augment the original files with. 
+func parseOverlayFiles(xctx XContext, pkg *PackageData, fileSet *token.FileSet) ([]JSFile, []*ast.File) { + importPath := pkg.ImportPath + isXTest := strings.HasSuffix(importPath, "_test") + if isXTest { + importPath = importPath[:len(importPath)-5] + } + + nativesContext := overlayCtx(xctx.Env()) + nativesPkg, err := nativesContext.Import(importPath, "", 0) + if err != nil { + return nil, nil + } + + jsFiles := nativesPkg.JSFiles + var files []*ast.File + names := nativesPkg.GoFiles + if pkg.IsTest { + names = append(names, nativesPkg.TestGoFiles...) + } + if isXTest { + names = nativesPkg.XTestGoFiles + } + + for _, name := range names { + fullPath := path.Join(nativesPkg.Dir, name) + r, err := nativesContext.bctx.OpenFile(fullPath) + if err != nil { + panic(err) + } + // Files should be uniquely named and in the original package directory in order to be + // ordered correctly + newPath := path.Join(pkg.Dir, "gopherjs__"+name) + file, err := parser.ParseFile(fileSet, newPath, r, parser.ParseComments) + if err != nil { + panic(err) + } + r.Close() + + files = append(files, file) + } + return jsFiles, files +} + +// augmentOverlayFile is the part of parseAndAugment that processes +// an overlay file AST to collect information such as compiler directives +// and perform any initial augmentation needed to the overlay. 
+func augmentOverlayFile(file *ast.File, overrides map[string]overrideInfo) { + anyChange := false + for i, decl := range file.Decls { + purgeDecl := astutil.Purge(decl) + switch d := decl.(type) { + case *ast.FuncDecl: + k := astutil.FuncKey(d) + oi := overrideInfo{ + keepOriginal: astutil.KeepOriginal(d), + } + if astutil.OverrideSignature(d) { + oi.overrideSignature = d + purgeDecl = true + } + overrides[k] = oi + case *ast.GenDecl: + for j, spec := range d.Specs { + purgeSpec := purgeDecl || astutil.Purge(spec) + switch s := spec.(type) { + case *ast.TypeSpec: + overrides[s.Name.Name] = overrideInfo{ + purgeMethods: purgeSpec, + } + case *ast.ValueSpec: + for _, name := range s.Names { + overrides[name.Name] = overrideInfo{} + } + } + if purgeSpec { + anyChange = true + d.Specs[j] = nil + } + } + } + if purgeDecl { + anyChange = true + file.Decls[i] = nil + } + } + if anyChange { + astutil.FinalizeRemovals(file) + astutil.PruneImports(file) + } +} + +// augmentOriginalImports is the part of parseAndAugment that processes +// an original file AST to modify the imports for that file. +func augmentOriginalImports(importPath string, file *ast.File) { + switch importPath { + case "crypto/rand", "encoding/gob", "encoding/json", "expvar", "go/token", "log", "math/big", "math/rand", "regexp", "time": + for _, spec := range file.Imports { + path, _ := strconv.Unquote(spec.Path.Value) + if path == "sync" { + if spec.Name == nil { + spec.Name = ast.NewIdent("sync") + } + spec.Path.Value = `"github.com/gopherjs/gopherjs/nosync"` + } + } + } +} + +// augmentOriginalFile is the part of parseAndAugment that processes an +// original file AST to augment the source code using the overrides from +// the overlay files. 
+func augmentOriginalFile(file *ast.File, overrides map[string]overrideInfo) { + anyChange := false + for i, decl := range file.Decls { + switch d := decl.(type) { + case *ast.FuncDecl: + if info, ok := overrides[astutil.FuncKey(d)]; ok { + anyChange = true + removeFunc := true + if info.keepOriginal { + // Allow overridden function calls + // The standard library implementation of foo() becomes _gopherjs_original_foo() + d.Name.Name = "_gopherjs_original_" + d.Name.Name + removeFunc = false + } + if overSig := info.overrideSignature; overSig != nil { + d.Recv = overSig.Recv + d.Type.TypeParams = overSig.Type.TypeParams + d.Type.Params = overSig.Type.Params + d.Type.Results = overSig.Type.Results + removeFunc = false + } + if removeFunc { + file.Decls[i] = nil + } + } else if recvKey := astutil.FuncReceiverKey(d); len(recvKey) > 0 { + // check if the receiver has been purged, if so, remove the method too. + if info, ok := overrides[recvKey]; ok && info.purgeMethods { + anyChange = true + file.Decls[i] = nil + } + } + case *ast.GenDecl: + for j, spec := range d.Specs { + switch s := spec.(type) { + case *ast.TypeSpec: + if _, ok := overrides[s.Name.Name]; ok { + anyChange = true + d.Specs[j] = nil + } + case *ast.ValueSpec: + if len(s.Names) == len(s.Values) { + // multi-value context + // e.g. var a, b = 2, foo[int]() + // A removal will also remove the value which may be from a + // function call. This allows us to remove unwanted statements. + // However, if that call has a side effect which still needs + // to be run, add the call into the overlay. + for k, name := range s.Names { + if _, ok := overrides[name.Name]; ok { + anyChange = true + s.Names[k] = nil + s.Values[k] = nil + } + } + } else { + // single-value context + // e.g. var a, b = foo[int]() + // If a removal from the overlays makes all returned values unused, + // then remove the function call as well. This allows us to stop + // unwanted calls if needed. 
If that call has a side effect which + // still needs to be run, add the call into the overlay. + nameRemoved := false + for _, name := range s.Names { + if _, ok := overrides[name.Name]; ok { + nameRemoved = true + name.Name = `_` + } + } + if nameRemoved { + removeSpec := true + for _, name := range s.Names { + if name.Name != `_` { + removeSpec = false + break + } + } + if removeSpec { + anyChange = true + d.Specs[j] = nil + } + } + } + } + } + } + } + if anyChange { + astutil.FinalizeRemovals(file) + astutil.PruneImports(file) + } +} diff --git a/build/build.go b/build/build.go index 2ec712fc7..97a35eeb3 100644 --- a/build/build.go +++ b/build/build.go @@ -16,7 +16,6 @@ import ( "io/fs" "os" "os/exec" - "path" "path/filepath" "sort" "strconv" @@ -26,7 +25,6 @@ import ( "github.com/fsnotify/fsnotify" "github.com/gopherjs/gopherjs/compiler" - "github.com/gopherjs/gopherjs/compiler/astutil" log "github.com/sirupsen/logrus" "github.com/neelance/sourcemap" @@ -118,30 +116,11 @@ func ImportDir(dir string, mode build.ImportMode, installSuffix string, buildTag return pkg, nil } -// overrideInfo is used by parseAndAugment methods to manage -// directives and how the overlay and original are merged. -type overrideInfo struct { - // KeepOriginal indicates that the original code should be kept - // but the identifier will be prefixed by `_gopherjs_original_foo`. - // If false the original code is removed. - keepOriginal bool - - // purgeMethods indicates that this info is for a type and - // if a method has this type as a receiver should also be removed. - // If the method is defined in the overlays and therefore has its - // own overrides, this will be ignored. - purgeMethods bool - - // overrideSignature is the function definition given in the overlays - // that should be used to replace the signature in the originals. - // Only receivers, type parameters, parameters, and results will be used. 
- overrideSignature *ast.FuncDecl -} - // parseAndAugment parses and returns all .go files of given pkg. // Standard Go library packages are augmented with files in compiler/natives folder. -// If isTest is true and pkg.ImportPath has no _test suffix, package is built for running internal tests. -// If isTest is true and pkg.ImportPath has _test suffix, package is built for running external tests. +// +// If pkg.IsTest is true and pkg.ImportPath has no _test suffix, package is built for running internal tests. +// If pkg.IsTest is true and pkg.ImportPath has _test suffix, package is built for running external tests. // // The native packages are augmented by the contents of natives.FS in the following way. // The file names do not matter except the usual `_test` suffix. The files for @@ -164,76 +143,22 @@ type overrideInfo struct { // - Otherwise for identifiers that exist in the original and the overrides, // the original is removed. // - New identifiers that don't exist in original package get added. -func parseAndAugment(xctx XContext, pkg *PackageData, isTest bool, fileSet *token.FileSet) ([]*ast.File, []JSFile, error) { - jsFiles, overlayFiles := parseOverlayFiles(xctx, pkg, isTest, fileSet) - +func parseAndAugment(xctx XContext, pkg *PackageData, fileSet *token.FileSet) ([]*ast.File, []JSFile, error) { originalFiles, err := parserOriginalFiles(pkg, fileSet) if err != nil { return nil, nil, err } - overrides := make(map[string]overrideInfo) - for _, file := range overlayFiles { - augmentOverlayFile(file, overrides) - } - delete(overrides, "init") - + aug := &Augmentor{} for _, file := range originalFiles { - augmentOriginalImports(pkg.ImportPath, file) - } - - if len(overrides) > 0 { - for _, file := range originalFiles { - augmentOriginalFile(file, overrides) - } - } - - return append(overlayFiles, originalFiles...), jsFiles, nil -} - -// parseOverlayFiles loads and parses overlay files -// to augment the original files with. 
-func parseOverlayFiles(xctx XContext, pkg *PackageData, isTest bool, fileSet *token.FileSet) ([]JSFile, []*ast.File) { - isXTest := strings.HasSuffix(pkg.ImportPath, "_test") - importPath := pkg.ImportPath - if isXTest { - importPath = importPath[:len(importPath)-5] - } - - nativesContext := overlayCtx(xctx.Env()) - nativesPkg, err := nativesContext.Import(importPath, "", 0) - if err != nil { - return nil, nil - } - - jsFiles := nativesPkg.JSFiles - var files []*ast.File - names := nativesPkg.GoFiles - if isTest { - names = append(names, nativesPkg.TestGoFiles...) - } - if isXTest { - names = nativesPkg.XTestGoFiles - } - - for _, name := range names { - fullPath := path.Join(nativesPkg.Dir, name) - r, err := nativesContext.bctx.OpenFile(fullPath) - if err != nil { - panic(err) - } - // Files should be uniquely named and in the original package directory in order to be - // ordered correctly - newPath := path.Join(pkg.Dir, "gopherjs__"+name) - file, err := parser.ParseFile(fileSet, newPath, r, parser.ParseComments) + err := aug.Augment(xctx, pkg, fileSet, file) if err != nil { - panic(err) + return nil, nil, err } - r.Close() - - files = append(files, file) } - return jsFiles, files + + jsFiles := aug.GetJSFiles(pkg) + return originalFiles, jsFiles, nil } // parserOriginalFiles loads and parses the original files to augment. @@ -275,324 +200,6 @@ func parserOriginalFiles(pkg *PackageData, fileSet *token.FileSet) ([]*ast.File, return files, nil } -// augmentOverlayFile is the part of parseAndAugment that processes -// an overlay file AST to collect information such as compiler directives -// and perform any initial augmentation needed to the overlay. 
-func augmentOverlayFile(file *ast.File, overrides map[string]overrideInfo) { - anyChange := false - for i, decl := range file.Decls { - purgeDecl := astutil.Purge(decl) - switch d := decl.(type) { - case *ast.FuncDecl: - k := astutil.FuncKey(d) - oi := overrideInfo{ - keepOriginal: astutil.KeepOriginal(d), - } - if astutil.OverrideSignature(d) { - oi.overrideSignature = d - purgeDecl = true - } - overrides[k] = oi - case *ast.GenDecl: - for j, spec := range d.Specs { - purgeSpec := purgeDecl || astutil.Purge(spec) - switch s := spec.(type) { - case *ast.TypeSpec: - overrides[s.Name.Name] = overrideInfo{ - purgeMethods: purgeSpec, - } - case *ast.ValueSpec: - for _, name := range s.Names { - overrides[name.Name] = overrideInfo{} - } - } - if purgeSpec { - anyChange = true - d.Specs[j] = nil - } - } - } - if purgeDecl { - anyChange = true - file.Decls[i] = nil - } - } - if anyChange { - finalizeRemovals(file) - pruneImports(file) - } -} - -// augmentOriginalImports is the part of parseAndAugment that processes -// an original file AST to modify the imports for that file. -func augmentOriginalImports(importPath string, file *ast.File) { - switch importPath { - case "crypto/rand", "encoding/gob", "encoding/json", "expvar", "go/token", "log", "math/big", "math/rand", "regexp", "time": - for _, spec := range file.Imports { - path, _ := strconv.Unquote(spec.Path.Value) - if path == "sync" { - if spec.Name == nil { - spec.Name = ast.NewIdent("sync") - } - spec.Path.Value = `"github.com/gopherjs/gopherjs/nosync"` - } - } - } -} - -// augmentOriginalFile is the part of parseAndAugment that processes an -// original file AST to augment the source code using the overrides from -// the overlay files. 
-func augmentOriginalFile(file *ast.File, overrides map[string]overrideInfo) { - anyChange := false - for i, decl := range file.Decls { - switch d := decl.(type) { - case *ast.FuncDecl: - if info, ok := overrides[astutil.FuncKey(d)]; ok { - anyChange = true - removeFunc := true - if info.keepOriginal { - // Allow overridden function calls - // The standard library implementation of foo() becomes _gopherjs_original_foo() - d.Name.Name = "_gopherjs_original_" + d.Name.Name - removeFunc = false - } - if overSig := info.overrideSignature; overSig != nil { - d.Recv = overSig.Recv - d.Type.TypeParams = overSig.Type.TypeParams - d.Type.Params = overSig.Type.Params - d.Type.Results = overSig.Type.Results - removeFunc = false - } - if removeFunc { - file.Decls[i] = nil - } - } else if recvKey := astutil.FuncReceiverKey(d); len(recvKey) > 0 { - // check if the receiver has been purged, if so, remove the method too. - if info, ok := overrides[recvKey]; ok && info.purgeMethods { - anyChange = true - file.Decls[i] = nil - } - } - case *ast.GenDecl: - for j, spec := range d.Specs { - switch s := spec.(type) { - case *ast.TypeSpec: - if _, ok := overrides[s.Name.Name]; ok { - anyChange = true - d.Specs[j] = nil - } - case *ast.ValueSpec: - if len(s.Names) == len(s.Values) { - // multi-value context - // e.g. var a, b = 2, foo[int]() - // A removal will also remove the value which may be from a - // function call. This allows us to remove unwanted statements. - // However, if that call has a side effect which still needs - // to be run, add the call into the overlay. - for k, name := range s.Names { - if _, ok := overrides[name.Name]; ok { - anyChange = true - s.Names[k] = nil - s.Values[k] = nil - } - } - } else { - // single-value context - // e.g. var a, b = foo[int]() - // If a removal from the overlays makes all returned values unused, - // then remove the function call as well. This allows us to stop - // unwanted calls if needed. 
If that call has a side effect which - // still needs to be run, add the call into the overlay. - nameRemoved := false - for _, name := range s.Names { - if _, ok := overrides[name.Name]; ok { - nameRemoved = true - name.Name = `_` - } - } - if nameRemoved { - removeSpec := true - for _, name := range s.Names { - if name.Name != `_` { - removeSpec = false - break - } - } - if removeSpec { - anyChange = true - d.Specs[j] = nil - } - } - } - } - } - } - } - if anyChange { - finalizeRemovals(file) - pruneImports(file) - } -} - -// isOnlyImports determines if this file is empty except for imports. -func isOnlyImports(file *ast.File) bool { - for _, decl := range file.Decls { - if gen, ok := decl.(*ast.GenDecl); ok && gen.Tok == token.IMPORT { - continue - } - - // The decl was either a FuncDecl or a non-import GenDecl. - return false - } - return true -} - -// pruneImports will remove any unused imports from the file. -// -// This will not remove any dot (`.`) or blank (`_`) imports, unless -// there are no declarations or directives meaning that all the imports -// should be cleared. -// If the removal of code causes an import to be removed, the init's from that -// import may not be run anymore. If we still need to run an init for an import -// which is no longer used, add it to the overlay as a blank (`_`) import. -// -// This uses the given name or guesses at the name using the import path, -// meaning this doesn't work for packages which have a different package name -// from the path, including those paths which are versioned -// (e.g. `github.com/foo/bar/v2` where the package name is `bar`) -// or if the import is defined using a relative path (e.g. `./..`). -// Those cases don't exist in the native for Go, so we should only run -// this pruning when we have native overlays, but not for unknown packages. 
-func pruneImports(file *ast.File) { - if isOnlyImports(file) && !astutil.HasDirectivePrefix(file, `//go:linkname `) { - // The file is empty, remove all imports including any `.` or `_` imports. - file.Imports = nil - file.Decls = nil - return - } - - unused := make(map[string]int, len(file.Imports)) - for i, in := range file.Imports { - if name := astutil.ImportName(in); len(name) > 0 { - unused[name] = i - } - } - - // Remove "unused imports" for any import which is used. - ast.Inspect(file, func(n ast.Node) bool { - if sel, ok := n.(*ast.SelectorExpr); ok { - if id, ok := sel.X.(*ast.Ident); ok && id.Obj == nil { - delete(unused, id.Name) - } - } - return len(unused) > 0 - }) - if len(unused) == 0 { - return - } - - // Remove "unused imports" for any import used for a directive. - directiveImports := map[string]string{ - `unsafe`: `//go:linkname `, - `embed`: `//go:embed `, - } - for name, index := range unused { - in := file.Imports[index] - path, _ := strconv.Unquote(in.Path.Value) - directivePrefix, hasPath := directiveImports[path] - if hasPath && astutil.HasDirectivePrefix(file, directivePrefix) { - // since the import is otherwise unused set the name to blank. - in.Name = ast.NewIdent(`_`) - delete(unused, name) - } - } - if len(unused) == 0 { - return - } - - // Remove all unused import specifications - isUnusedSpec := map[*ast.ImportSpec]bool{} - for _, index := range unused { - isUnusedSpec[file.Imports[index]] = true - } - for _, decl := range file.Decls { - if d, ok := decl.(*ast.GenDecl); ok { - for i, spec := range d.Specs { - if other, ok := spec.(*ast.ImportSpec); ok && isUnusedSpec[other] { - d.Specs[i] = nil - } - } - } - } - - // Remove the unused import copies in the file - for _, index := range unused { - file.Imports[index] = nil - } - - finalizeRemovals(file) -} - -// finalizeRemovals fully removes any declaration, specification, imports -// that have been set to nil. 
This will also remove any unassociated comment -// groups, including the comments from removed code. -func finalizeRemovals(file *ast.File) { - fileChanged := false - for i, decl := range file.Decls { - switch d := decl.(type) { - case nil: - fileChanged = true - case *ast.GenDecl: - declChanged := false - for j, spec := range d.Specs { - switch s := spec.(type) { - case nil: - declChanged = true - case *ast.ValueSpec: - specChanged := false - for _, name := range s.Names { - if name == nil { - specChanged = true - break - } - } - if specChanged { - s.Names = astutil.Squeeze(s.Names) - s.Values = astutil.Squeeze(s.Values) - if len(s.Names) == 0 { - declChanged = true - d.Specs[j] = nil - } - } - } - } - if declChanged { - d.Specs = astutil.Squeeze(d.Specs) - if len(d.Specs) == 0 { - fileChanged = true - file.Decls[i] = nil - } - } - } - } - if fileChanged { - file.Decls = astutil.Squeeze(file.Decls) - } - - file.Imports = astutil.Squeeze(file.Imports) - - file.Comments = nil // clear this first so ast.Inspect doesn't walk it. - remComments := []*ast.CommentGroup{} - ast.Inspect(file, func(n ast.Node) bool { - if cg, ok := n.(*ast.CommentGroup); ok { - remComments = append(remComments, cg) - } - return true - }) - file.Comments = remComments -} - // Options controls build process behavior. type Options struct { Verbose bool @@ -1004,7 +611,7 @@ func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) { // Existing archive is out of date or doesn't exist, let's build the package. 
fileSet := token.NewFileSet() - files, overlayJsFiles, err := parseAndAugment(s.xctx, pkg, pkg.IsTest, fileSet) + files, overlayJsFiles, err := parseAndAugment(s.xctx, pkg, fileSet) if err != nil { return nil, err } diff --git a/build/build_test.go b/build/build_test.go index 7bda7f54a..ef9b8306b 100644 --- a/build/build_test.go +++ b/build/build_test.go @@ -7,6 +7,7 @@ import ( "strconv" "testing" + "github.com/gopherjs/gopherjs/compiler/astutil" "github.com/gopherjs/gopherjs/internal/srctesting" "github.com/shurcooL/go/importgraphutil" ) @@ -91,7 +92,7 @@ func TestNativesDontImportExtraPackages(t *testing.T) { // Use parseAndAugment to get a list of augmented AST files. fset := token.NewFileSet() - files, _, err := parseAndAugment(stdOnly, pkgVariant, pkgVariant.IsTest, fset) + files, _, err := parseAndAugment(stdOnly, pkgVariant, fset) if err != nil { t.Fatalf("github.com/gopherjs/gopherjs/build.parseAndAugment: %v", err) } @@ -423,7 +424,7 @@ func TestOverlayAugmentation(t *testing.T) { overrides := map[string]overrideInfo{} augmentOverlayFile(fileSrc, overrides) - pruneImports(fileSrc) + astutil.PruneImports(fileSrc) got := srctesting.Format(t, f.FileSet, fileSrc) @@ -724,7 +725,7 @@ func TestOriginalAugmentation(t *testing.T) { augmentOriginalImports(importPath, fileSrc) augmentOriginalFile(fileSrc, test.info) - pruneImports(fileSrc) + astutil.PruneImports(fileSrc) got := srctesting.Format(t, f.FileSet, fileSrc) diff --git a/build/context.go b/build/context.go index 316bfb2bb..3eaf5a397 100644 --- a/build/context.go +++ b/build/context.go @@ -320,7 +320,7 @@ func overlayCtx(e Env) *simpleCtx { // packages in case they are not present in the user's source tree. 
func gopherjsCtx(e Env) *simpleCtx { gopherjsRoot := filepath.Join(e.GOROOT, "src", "github.com", "gopherjs", "gopherjs") - return embeddedCtx(&withPrefix{gopherjspkg.FS, gopherjsRoot}, e) + return embeddedCtx(&withPrefix{fs: gopherjspkg.FS, prefix: gopherjsRoot}, e) } // goCtx creates simpleCtx that imports from the real file system GOROOT, GOPATH diff --git a/compiler/astutil/astutil.go b/compiler/astutil/astutil.go index 9ff88a48c..37853af8d 100644 --- a/compiler/astutil/astutil.go +++ b/compiler/astutil/astutil.go @@ -8,6 +8,7 @@ import ( "path" "reflect" "regexp" + "sort" "strconv" "strings" ) @@ -223,14 +224,14 @@ func hasDirective(node ast.Node, directiveAction string) bool { // HasDirectivePrefix determines if any line in the given file // has the given directive prefix in it. func HasDirectivePrefix(file *ast.File, prefix string) bool { - for _, cg := range file.Comments { - for _, c := range cg.List { - if strings.HasPrefix(c.Text, prefix) { - return true - } + foundDirective := false + ast.Inspect(file, func(n ast.Node) bool { + if c, ok := n.(*ast.Comment); ok && strings.HasPrefix(c.Text, prefix) { + foundDirective = true } - } - return false + return !foundDirective + }) + return foundDirective } // FindLoopStmt tries to find the loop statement among the AST nodes in the @@ -288,11 +289,110 @@ func EndsWithReturn(stmts []ast.Stmt) bool { } } -// Squeeze removes all nil nodes from the slice. +// isOnlyImports determines if this file is empty except for imports. +func isOnlyImports(file *ast.File) bool { + for _, decl := range file.Decls { + if gen, ok := decl.(*ast.GenDecl); ok && gen.Tok == token.IMPORT { + continue + } + + // The decl was either a FuncDecl or a non-import GenDecl. + return false + } + return true +} + +// PruneImports will remove any unused imports from the file. +// +// This will not remove any dot (`.`) or blank (`_`) imports, unless +// there are no declarations or directives meaning that all the imports +// should be cleared. 
+// If the removal of code causes an import to be removed, the init's from that +// import may not be run anymore. If we still need to run an init for an import +// which is no longer used, add it to the overlay as a blank (`_`) import. +// +// This uses the given name or guesses at the name using the import path, +// meaning this doesn't work for packages which have a different package name +// from the path, including those paths which are versioned +// (e.g. `github.com/foo/bar/v2` where the package name is `bar`) +// or if the import is defined using a relative path (e.g. `./..`). +// Those cases don't exist in the native for Go, so we should only run +// this pruning when we have native overlays, but not for unknown packages. +func PruneImports(file *ast.File) { + if isOnlyImports(file) && !HasDirectivePrefix(file, `//go:linkname `) { + // The file is empty, remove all imports including any `.` or `_` imports. + file.Imports = nil + file.Decls = nil + return + } + + unused := make(map[string]int, len(file.Imports)) + for i, in := range file.Imports { + if name := ImportName(in); len(name) > 0 { + unused[name] = i + } + } + + // Remove from "unused imports" for any import which is used. + ast.Inspect(file, func(n ast.Node) bool { + if sel, ok := n.(*ast.SelectorExpr); ok { + if id, ok := sel.X.(*ast.Ident); ok && id.Obj == nil { + delete(unused, id.Name) + } + } + return len(unused) > 0 + }) + if len(unused) == 0 { + return + } + + // Remove from "unused imports" for any import used for a directive. + directiveImports := map[string]string{ + `unsafe`: `//go:linkname `, + `embed`: `//go:embed `, + } + for name, index := range unused { + in := file.Imports[index] + path, _ := strconv.Unquote(in.Path.Value) + directivePrefix, hasPath := directiveImports[path] + if hasPath && HasDirectivePrefix(file, directivePrefix) { + // since the import is otherwise unused set the name to blank. 
+ in.Name = ast.NewIdent(`_`) + delete(unused, name) + } + } + if len(unused) == 0 { + return + } + + // Remove all unused import specifications + isUnusedSpec := map[*ast.ImportSpec]bool{} + for _, index := range unused { + isUnusedSpec[file.Imports[index]] = true + } + for _, decl := range file.Decls { + if d, ok := decl.(*ast.GenDecl); ok { + for i, spec := range d.Specs { + if other, ok := spec.(*ast.ImportSpec); ok && isUnusedSpec[other] { + d.Specs[i] = nil + } + } + } + } + + // Remove the unused import copies in the file + for _, index := range unused { + file.Imports[index] = nil + } + + FinalizeRemovals(file) +} + +// squeeze removes all nil nodes from the slice. // // The given slice will be modified. This is designed for squeezing // declaration, specification, imports, and identifier lists. -func Squeeze[E ast.Node, S ~[]E](s S) S { +func squeeze[E ast.Node, S ~[]E](s S) S { var zero E count, dest := len(s), 0 for src := 0; src < count; src++ { @@ -305,3 +405,193 @@ func Squeeze[E ast.Node, S ~[]E](s S) S { } return s[:dest] } + +// updateFileComments rebuilds the file comments by reading the comments +// off of the nodes in the file. Any comments that are not associated with +// a node will be lost. +func updateFileComments(file *ast.File) { + file.Comments = nil // clear this first so ast.Inspect doesn't walk it. + remComments := []*ast.CommentGroup{} + ast.Inspect(file, func(n ast.Node) bool { + if cg, ok := n.(*ast.CommentGroup); ok { + remComments = append(remComments, cg) + } + return true + }) + file.Comments = remComments +} + +// FinalizeRemovals fully removes any declaration, specification, imports +// that have been set to nil. This will also remove any unassociated comment +// groups, including the comments from removed code. +// Comments that are floating and tied to a node will be lost. 
+func FinalizeRemovals(file *ast.File) {
+	fileChanged := false
+	for i, decl := range file.Decls {
+		switch d := decl.(type) {
+		case nil:
+			fileChanged = true
+		case *ast.GenDecl:
+			declChanged := false
+			for j, spec := range d.Specs {
+				switch s := spec.(type) {
+				case nil:
+					declChanged = true
+				case *ast.ValueSpec:
+					specChanged := false
+					for _, name := range s.Names {
+						if name == nil {
+							specChanged = true
+							break
+						}
+					}
+					if specChanged {
+						s.Names = squeeze(s.Names)
+						s.Values = squeeze(s.Values)
+						if len(s.Names) == 0 {
+							declChanged = true
+							d.Specs[j] = nil
+						}
+					}
+				}
+			}
+			if declChanged {
+				d.Specs = squeeze(d.Specs)
+				if len(d.Specs) == 0 {
+					fileChanged = true
+					file.Decls[i] = nil
+				}
+			}
+		}
+	}
+	if fileChanged {
+		file.Decls = squeeze(file.Decls)
+	}
+
+	file.Imports = squeeze(file.Imports)
+
+	updateFileComments(file)
+}
+
+// ConcatenateFiles will concatenate the given tailing files onto the
+// end of the first given AST file.
+//
+// This is designed to handle concatenating native overrides into the original
+// source files so won't work for general purpose AST file concatenation.
+//
+// Returns an error if the concatenation fails.
+//
+// Caveats:
+//   - The Pos fields will not be modified so that the source locations will
+//     still show the correct file or virtual file positions.
+//   - The given file will be modified even if an error is returned and may
+//     be in an invalid state.
+//   - The tail files must be from the same package name and have the same import
+//     names for imports with the same import path.
+//   - Any duplicate objects must have been already resolved via an overlay
+//     augmentation prior to concatenation so that there are no duplicate objects.
+//     Any remaining duplicate objects will cause an error to be returned.
+// - The tails will not be modified, however the nodes from the tails will be +// added into the target file so modifications to the tails after +// concatenation could cause the target file to be in an invalid state. +// - This will not modify the deprecated Unresolved or file Scope fields. +// - Any comments on import declarations will be lost since the imports will +// be merged into a single new import declaration. The comments on the +// individual import specs will be preserved. +// - The package comments will be concatenated. It will not check for +// build constraints or any file level directives, but simply append +// the tail comments as is. This may cause issues when formatting +// the resulting file including extra newlines or invalid code. +func ConcatenateFiles(file *ast.File, tails ...*ast.File) error { + // Populate the imports map from the target file. + // This map will be used to check for duplicate imports. + imports := make(map[string]*ast.ImportSpec, len(file.Imports)) + for _, imp := range file.Imports { + imports[imp.Path.Value] = imp + } + + // Get list of declarations not including the imports. + decls := make([]ast.Decl, 0, len(file.Decls)) + for _, decl := range file.Decls { + if gen, ok := decl.(*ast.GenDecl); !ok || gen.Tok != token.IMPORT { + decls = append(decls, decl) + } + } + + // Merge in all the tail files into the target file. + for _, tail := range tails { + + // Check the package names match. + if file.Name.Name != tail.Name.Name { + return fmt.Errorf("can not concatenate files with different package names: %q != %q", file.Name.Name, tail.Name.Name) + } + + // Concatenate the imports. + for _, imp := range tail.Imports { + path := imp.Path.Value + if oldImp, ok := imports[path]; ok { + // Import is in both files so check if the import name is not different. + oldName, newName := ImportName(oldImp), ImportName(imp) + if oldName != newName { + if len(oldName) == 0 { + // Update the import name to the new name. 
+ // This assumes the import name was `_` and + // could cause problems if it was `.` + oldImp.Name = imp.Name + } else if len(newName) != 0 { + return fmt.Errorf("import from of %s can not be concatenated with different name: %q != %q", path, oldName, newName) + } + } + continue + } + imports[imp.Path.Value] = imp + } + + // Concatenate the declarations while skipping imports. + for _, decl := range tail.Decls { + if gen, ok := decl.(*ast.GenDecl); !ok || gen.Tok != token.IMPORT { + decls = append(decls, decl) + } + } + + // Concatenate the document comments. + if tail.Doc != nil { + if file.Doc == nil { + file.Doc = &ast.CommentGroup{} + } + file.Doc.List = append(file.Doc.List, tail.Doc.List...) + + // To help prevent issues when formatting causing a document comment + // to occur between `package` and the package name, move the package + // name to the Pos of the tail so it comes after the tail's package comment. + file.Package = tail.Package + file.Name.NamePos = tail.Name.NamePos + } + } + + // Update the target file's declarations with all the imports + // prepended to the list of declarations as one import declaration. + // Also sort the imports by path to ensure a consistent order. + if len(imports) > 0 { + importsGen := &ast.GenDecl{ + Tok: token.IMPORT, + Specs: make([]ast.Spec, 0, len(file.Imports)), + } + paths := make([]string, 0, len(imports)) + for path := range imports { + paths = append(paths, path) + } + sort.Strings(paths) + file.Imports = make([]*ast.ImportSpec, 0, len(imports)) + for _, path := range paths { + imp := imports[path] + importsGen.Specs = append(importsGen.Specs, imp) + file.Imports = append(file.Imports, imp) + } + decls = append([]ast.Decl{importsGen}, decls...) 
+ } + file.Decls = decls + + updateFileComments(file) + return nil +} diff --git a/compiler/astutil/astutil_test.go b/compiler/astutil/astutil_test.go index 56dabc510..9d17df228 100644 --- a/compiler/astutil/astutil_test.go +++ b/compiler/astutil/astutil_test.go @@ -567,7 +567,7 @@ func TestSqueezeIdents(t *testing.T) { input[i] = ast.NewIdent(strconv.Itoa(i)) } - result := Squeeze(input) + result := squeeze(input) if len(result) != len(test.assign) { t.Errorf("Squeeze() returned a slice %d long, want %d", len(result), len(test.assign)) } @@ -588,3 +588,564 @@ func TestSqueezeIdents(t *testing.T) { }) } } + +func TestPruneImports(t *testing.T) { + tests := []struct { + name string + src string + want string + }{ + { + name: `no imports`, + src: `package testpackage + func foo() {}`, + want: `package testpackage + func foo() {}`, + }, { + name: `keep used imports`, + src: `package testpackage + import "fmt" + func foo() { fmt.Println("foo") }`, + want: `package testpackage + import "fmt" + func foo() { fmt.Println("foo") }`, + }, { + name: `remove imports that are not used`, + src: `package testpackage + import "fmt" + func foo() { }`, + want: `package testpackage + func foo() { }`, + }, { + name: `remove imports that are unused but masked by an object`, + src: `package testpackage + import "fmt" + var fmt = "format" + func foo() string { return fmt }`, + want: `package testpackage + var fmt = "format" + func foo() string { return fmt }`, + }, { + name: `remove imports from empty file`, + src: `package testpackage + import "fmt" + import _ "unsafe"`, + want: `package testpackage`, + }, { + name: `remove imports from empty file except for unsafe when linking`, + src: `package testpackage + import "fmt" + import "embed" + + //go:linkname foo runtime.foo + import "unsafe"`, + want: `package testpackage + + //go:linkname foo runtime.foo + import _ "unsafe"`, + }, { + name: `keep embed imports when embedding`, + src: `package testpackage + import "fmt" + import "embed" + 
import "unsafe" + + //go:embed "foo.txt" + var foo string`, + want: `package testpackage + import _ "embed" + + //go:embed "foo.txt" + var foo string`, + }, { + name: `keep imports that just needed an underscore`, + src: `package testpackage + import "embed" + //go:linkname foo runtime.foo + import "unsafe" + //go:embed "foo.txt" + var foo string`, + want: `package testpackage + import _ "embed" + //go:linkname foo runtime.foo + import _ "unsafe" + //go:embed "foo.txt" + var foo string`, + }, { + name: `keep imports without names`, + src: `package testpackage + import _ "fmt" + import "log" + import . "math" + + var foo string`, + want: `package testpackage + import _ "fmt" + + import . "math" + + var foo string`, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + st := srctesting.New(t) + + srcFile := st.Parse(`testSrc.go`, test.src) + PruneImports(srcFile) + got := srctesting.Format(t, st.FileSet, srcFile) + + // parse and format the expected result so that formatting matches + wantFile := st.Parse(`testWant.go`, test.want) + want := srctesting.Format(t, st.FileSet, wantFile) + + if got != want { + t.Errorf("Unexpected resulting AST after PruneImports:\n\tgot: %q\n\twant: %q", got, want) + } + }) + } +} + +func TestFinalizeRemovals(t *testing.T) { + tests := []struct { + name string + src string + perforator func(f *ast.File) + want string + }{ + { + name: `no removals`, + src: `package testpackage + // foo took a journey + func foo() {} + // bar went home + func bar[T any](v T) T { return v } + // baz is a mystery + var baz int = 42`, + perforator: func(f *ast.File) {}, + want: `package testpackage + // foo took a journey + func foo() {} + // bar went home + func bar[T any](v T) T { return v } + // baz is a mystery + var baz int = 42`, + }, { + name: `removal first decl`, + src: `package testpackage + // foo took a journey + func foo() {} + // bar went home + func bar[T any](v T) T { return v } + // baz is a mystery + var baz int 
= 42`, + perforator: func(f *ast.File) { + f.Decls[0] = nil + }, + want: `package testpackage + // bar went home + func bar[T any](v T) T { return v } + // baz is a mystery + var baz int = 42`, + }, { + name: `removal middle decl`, + src: `package testpackage + // foo took a journey + func foo() {} + // bar went home + func bar[T any](v T) T { return v } + // baz is a mystery + var baz int = 42`, + perforator: func(f *ast.File) { + f.Decls[1] = nil + }, + want: `package testpackage + // foo took a journey + func foo() {} + // baz is a mystery + var baz int = 42`, + }, { + name: `removal last decl`, + src: `package testpackage + // foo took a journey + func foo() {} + // bar went home + func bar[T any](v T) T { return v } + // baz is a mystery + var baz int = 42`, + perforator: func(f *ast.File) { + f.Decls[len(f.Decls)-1] = nil + }, + want: `package testpackage + // foo took a journey + func foo() {} + // bar went home + func bar[T any](v T) T { return v }`, + }, { + name: `removal one whole value spec`, + src: `package testpackage + var ( + foo string = "foo" + bar, baz int = 42, 36 + )`, + perforator: func(f *ast.File) { + f.Decls[0].(*ast.GenDecl).Specs[1] = nil + }, + want: `package testpackage + var ( + foo string = "foo" + )`, + }, { + name: `removal part of one value spec`, + src: `package testpackage + var ( + foo string = "foo" + bar, baz int = 42, 36 + )`, + perforator: func(f *ast.File) { + spec := f.Decls[0].(*ast.GenDecl).Specs[1].(*ast.ValueSpec) + spec.Names[1] = nil + spec.Values[1] = nil + }, + want: `package testpackage + var ( + foo string = "foo" + bar int = 42 + )`, + }, { + name: `removal all parts of one value spec`, + src: `package testpackage + var ( + foo string = "foo" + bar, baz int = 42, 36 + )`, + perforator: func(f *ast.File) { + spec := f.Decls[0].(*ast.GenDecl).Specs[1].(*ast.ValueSpec) + spec.Names[0] = nil + spec.Values[0] = nil + spec.Names[1] = nil + spec.Values[1] = nil + }, + want: `package testpackage + var ( + foo string = 
"foo" + )`, + }, + { + name: `removal all value specs`, + src: `package testpackage + var ( + foo string = "foo" + bar, baz int = 42, 36 + )`, + perforator: func(f *ast.File) { + decl := f.Decls[0].(*ast.GenDecl) + decl.Specs[0] = nil + decl.Specs[1] = nil + }, + want: `package testpackage`, + }, { + name: `removal one type spec`, + src: `package testpackage + type ( + foo interface{ String() string } + bar struct{ baz int } + )`, + perforator: func(f *ast.File) { + decl := f.Decls[0].(*ast.GenDecl) + decl.Specs[0] = nil + }, + want: `package testpackage + type ( + bar struct{ baz int } + )`, + }, { + name: `removal all type specs`, + src: `package testpackage + type ( + foo interface{ String() string } + bar struct{ baz int } + )`, + perforator: func(f *ast.File) { + decl := f.Decls[0].(*ast.GenDecl) + decl.Specs[0] = nil + decl.Specs[1] = nil + }, + want: `package testpackage`, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + st := srctesting.New(t) + + srcFile := st.Parse(`testSrc.go`, test.src) + test.perforator(srcFile) + FinalizeRemovals(srcFile) + got := srctesting.Format(t, st.FileSet, srcFile) + + // parse and format the expected result so that formatting matches + wantFile := st.Parse(`testWant.go`, test.want) + want := srctesting.Format(t, st.FileSet, wantFile) + + if got != want { + t.Errorf("Unexpected resulting AST:\n\tgot: %q\n\twant: %q", got, want) + } + }) + } +} + +func TestConcatenateFiles(t *testing.T) { + tests := []struct { + name string + srcHead string + srcTail string + want string + expErr string + }{ + { + name: `add a method with a comment`, + srcHead: `package testpackage + // foo is an original method. + func foo() {}`, + srcTail: `package testpackage + // bar is a concatenated method + // from an additional override file. + func bar() {}`, + want: `package testpackage + // foo is an original method. + func foo() {} + // bar is a concatenated method + // from an additional override file. 
+ func bar() {}`, + }, { + name: `merge existing singular unnamed imports`, + srcHead: `package testpackage + import "fmt" + import "bytes" + + func prime(str fmt.Stringer) *bytes.Buffer { + return bytes.NewBufferString(str.String()) + }`, + srcTail: `package testpackage + import "bytes" + import "fmt" + + func cat(strs ...fmt.Stringer) fmt.Stringer { + buf := &bytes.Buffer{} + for _, str := range strs { + buf.WriteString(str.String()) + } + return buf + }`, + want: `package testpackage + import ( + "bytes" + "fmt" + ) + + func prime(str fmt.Stringer) *bytes.Buffer { + return bytes.NewBufferString(str.String()) + } + func cat(strs ...fmt.Stringer) fmt.Stringer { + buf := &bytes.Buffer{} + for _, str := range strs { + buf.WriteString(str.String()) + } + return buf + }`, + }, { + name: `merge existing named imports`, + srcHead: `package testpackage + import ( + foo "fmt" + bar "bytes" + ) + func prime(str foo.Stringer) *bar.Buffer { + return bar.NewBufferString(str.String()) + }`, + srcTail: `package testpackage + import ( + bar "bytes" + foo "fmt" + ) + func cat(strs ...foo.Stringer) foo.Stringer { + buf := &bar.Buffer{} + for _, str := range strs { + buf.WriteString(str.String()) + } + return buf + }`, + want: `package testpackage + import ( + bar "bytes" + foo "fmt" + ) + + func prime(str foo.Stringer) *bar.Buffer { + return bar.NewBufferString(str.String()) + } + func cat(strs ...foo.Stringer) foo.Stringer { + buf := &bar.Buffer{} + for _, str := range strs { + buf.WriteString(str.String()) + } + return buf + }`, + }, { + name: `merge imports that don't overlap`, + srcHead: `package testpackage + import ( + "fmt" + "bytes" + ) + func prime(str fmt.Stringer) *bytes.Buffer { + return bytes.NewBufferString(str.String()) + }`, + srcTail: `package testpackage + import "math" + import "log" + func NaNaNaBatman(name string, value float64) { + if math.IsNaN(value) { + log.Println("Warning: "+name+" is NaN") + } + }`, + want: `package testpackage + import ( + "bytes" + 
"fmt" + "log" + "math" + ) + func prime(str fmt.Stringer) *bytes.Buffer { + return bytes.NewBufferString(str.String()) + } + func NaNaNaBatman(name string, value float64) { + if math.IsNaN(value) { + log.Println("Warning: " + name + " is NaN") + } + }`, + }, { + name: `merge two package comments`, + srcHead: `// Original package comment + package testpackage + func foo() {}`, + srcTail: `// Additional package comment + package testpackage + var bar int`, + want: `// Original package comment + + // Additional package comment + package testpackage + func foo() {} + var bar int`, + }, { + name: `take package comment from tail`, + srcHead: `package testpackage + func foo() {}`, + srcTail: `// Additional package comment + package testpackage + var bar int`, + want: `// Additional package comment + package testpackage + func foo() {} + var bar int`, + }, { + name: `packages with different package names`, + srcHead: `package testpackage + func foo() {}`, + srcTail: `package otherTestPackage + func bar() {}`, + expErr: `can not concatenate files with different package names: "testpackage" != "otherTestPackage"`, + }, { + name: `import mismatch with one named`, + srcHead: `package testpackage + import "fmt" + func foo() { fmt.Println("foo") }`, + srcTail: `package testpackage + import f1 "fmt" + func bar() { f1.Println("bar") }`, + expErr: `import from of "fmt" can not be concatenated with different name: "fmt" != "f1"`, + }, { + name: `import mismatch with both named`, + srcHead: `package testpackage + import f1 "fmt" + func foo() { f1.Println("foo") }`, + srcTail: `package testpackage + import f2 "fmt" + func bar() { f2.Println("bar") }`, + expErr: `import from of "fmt" can not be concatenated with different name: "f1" != "f2"`, + }, { + name: `import mismatch with old being blank`, + srcHead: `package testpackage + import _ "unsafe" + //go:linkname foo runtime.foo + func bar()`, + srcTail: `package testpackage + import "unsafe" + func foo() unsafe.Pointer { return nil 
}`, + want: `package testpackage + import "unsafe" + //go:linkname foo runtime.foo + func bar() + func foo() unsafe.Pointer { return nil }`, + }, { + name: `import mismatch with new being blank`, + srcHead: `package testpackage + import "unsafe" + func foo() unsafe.Pointer { return nil }`, + srcTail: `package testpackage + import _ "unsafe" + //go:linkname foo runtime.foo + func bar()`, + want: `package testpackage + import "unsafe" + func foo() unsafe.Pointer { return nil } + //go:linkname foo runtime.foo + func bar()`, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + st := srctesting.New(t) + if (len(test.want) > 0) == (len(test.expErr) > 0) { + t.Fatal(`One and only one of "want" and "expErr" must be set`) + } + + headFile := st.Parse(`testHead.go`, test.srcHead) + tailFile := st.Parse(`testTail.go`, test.srcTail) + err := ConcatenateFiles(headFile, tailFile) + if err != nil { + if len(test.expErr) == 0 { + t.Errorf(`Expected an AST but got an error: %v`, err) + } else if err.Error() != test.expErr { + t.Errorf("Unexpected error:\n\tgot: %q\n\twant: %q", err.Error(), test.expErr) + } + return + } + + // The formatter expects the comment line numbers to be consecutive + // so that layout is preserved. We can't guarantee that the line + // numbers are correct after appending the files, which is fine + // as long as we aren't trying to format it. + // Setting the file comments to nil will force the formatter to use + // the comments on the AST nodes when the node is reached which + // gives a more accurate view of the concatenated file. + headFile.Comments = nil + got := srctesting.Format(t, st.FileSet, headFile) + if len(test.want) == 0 { + t.Errorf("Expected an error but got AST:\n\tgot: %q\n\twant: %q", got, test.expErr) + return + } + + // parse and format the expected result so that formatting matches. 
+ wantFile := st.Parse("testWant.go", test.want) + want := srctesting.Format(t, st.FileSet, wantFile) + if got != want { + t.Errorf("Unexpected resulting AST:\n\tgot: %q\n\twant: %q", got, want) + } + }) + } +} diff --git a/internal/srctesting/srctesting.go b/internal/srctesting/srctesting.go index 83499c6dd..bf74bce51 100644 --- a/internal/srctesting/srctesting.go +++ b/internal/srctesting/srctesting.go @@ -187,9 +187,9 @@ type Source struct { // root package. At least one source file must be given. // The root package's path will be `command-line-arguments`. // -// The auxillary files can be for different packages but should have paths +// The auxiliary files can be for different packages but should have paths // added to the source name so that they can be grouped together by package. -// To import an auxillary package, the path should be prepended by +// To import an auxiliary package, the path should be prepended by // `github.com/gopherjs/gopherjs/compiler`. func ParseSources(t *testing.T, sourceFiles []Source, auxFiles []Source) *packages.Package { t.Helper()