diff --git a/build/build.go b/build/build.go index 4517979c0..3af3c16c4 100644 --- a/build/build.go +++ b/build/build.go @@ -759,14 +759,13 @@ type Session struct { // sources is a map of parsed packages that have been built and augmented. // This is keyed using resolved import paths. This is used to avoid // rebuilding and augmenting packages that are imported by several packages. - // These sources haven't been sorted nor simplified yet. + // The files in these sources haven't been sorted nor simplified yet. sources map[string]*sources.Sources // Binary archives produced during the current session and assumed to be // up to date with input sources and dependencies. In the -w ("watch") mode // must be cleared upon entering watching. UpToDateArchives map[string]*compiler.Archive - Types map[string]*types.Package Watcher *fsnotify.Watcher } @@ -788,7 +787,6 @@ func NewSession(options *Options) (*Session, error) { return nil, err } - s.Types = make(map[string]*types.Package) if options.Watch { if out, err := exec.Command("ulimit", "-n").Output(); err == nil { if n, err := strconv.Atoi(strings.TrimSpace(string(out))); err == nil && n < 1024 { @@ -906,7 +904,7 @@ func (s *Session) BuildFiles(filenames []string, pkgObj string, cwd string) erro if err != nil { return err } - if s.Types["main"].Name() != "main" { + if s.sources["main"].Package.Name() != "main" { return fmt.Errorf("cannot build/run non-main package") } return s.WriteCommandPackage(archive, pkgObj) @@ -918,7 +916,7 @@ func (s *Session) BuildProject(pkg *PackageData) (*compiler.Archive, error) { // ensure that runtime for gopherjs is imported pkg.Imports = append(pkg.Imports, `runtime`) - // Build the project to get the sources for the parsed packages. + // Load the project to get the sources for the parsed packages. 
var srcs *sources.Sources var err error if pkg.IsTest { @@ -930,13 +928,35 @@ func (s *Session) BuildProject(pkg *PackageData) (*compiler.Archive, error) { return nil, err } - // TODO(grantnelson-wf): At this point we have all the parsed packages we - // need to compile the whole project, including testmain, if needed. - // We can perform analysis on the whole project at this point to propagate - // flatten, blocking, etc. information and check types to get the type info - // with all the instances for all generics in the whole project. + // TODO(grantnelson-wf): We could investigate caching the results of + // the sources prior to preparing them to avoid re-parsing the same + // sources and augmenting them when the files on disk haven't changed. + // This would require a way to determine if the sources are up-to-date + // which could be done with the left over srcModTime from when the archives + // were being cached. - return s.compilePackages(srcs) + // Prepare and analyze the source code. + // This will be performed recursively for all dependencies. + tContext := types.NewContext() + allSources := s.getSortedSources() + err = compiler.PrepareAllSources(allSources, s.SourcesForImport, tContext) + if err != nil { + return nil, err + } + + // Compile the project into Archives containing the generated JS. + return s.compilePackages(srcs, tContext) +} + +// getSortedSources returns the sources sorted by import path. +// The files in the sources may still not be sorted yet. 
+func (s *Session) getSortedSources() []*sources.Sources { + allSources := make([]*sources.Sources, 0, len(s.sources)) + for _, srcs := range s.sources { + allSources = append(allSources, srcs) + } + sources.SortedSourcesSlice(allSources) + return allSources } func (s *Session) loadTestPackage(pkg *PackageData) (*sources.Sources, error) { @@ -965,6 +985,7 @@ func (s *Session) loadTestPackage(pkg *PackageData) (*sources.Sources, error) { Files: []*ast.File{mainFile}, FileSet: fset, } + s.sources[srcs.ImportPath] = srcs // Import dependencies for the testmain package. for _, importedPkgPath := range srcs.UnresolvedImports() { @@ -1103,16 +1124,27 @@ func (s *Session) loadPackages(pkg *PackageData) (*sources.Sources, error) { return srcs, nil } -func (s *Session) compilePackages(srcs *sources.Sources) (*compiler.Archive, error) { +func (s *Session) compilePackages(rootSrcs *sources.Sources, tContext *types.Context) (*compiler.Archive, error) { + for _, srcs := range s.sources { + if _, err := s.compilePackage(srcs, tContext); err != nil { + return nil, err + } + } + + rootArchive, ok := s.UpToDateArchives[rootSrcs.ImportPath] + if !ok { + // This is confirmation that the root package is in the sources map and got compiled. 
+ return nil, fmt.Errorf(`root package %q not found`, rootSrcs.ImportPath) + } + return rootArchive, nil +} + +func (s *Session) compilePackage(srcs *sources.Sources, tContext *types.Context) (*compiler.Archive, error) { if archive, ok := s.UpToDateArchives[srcs.ImportPath]; ok { return archive, nil } - importContext := &compiler.ImportContext{ - Packages: s.Types, - ImportArchive: s.ImportResolverFor(srcs.Dir), - } - archive, err := compiler.Compile(*srcs, importContext, s.options.Minify) + archive, err := compiler.Compile(srcs, tContext, s.options.Minify) if err != nil { return nil, err } @@ -1152,6 +1184,20 @@ func (s *Session) getImportPath(path, srcDir string) (string, error) { return pkg.ImportPath, nil } +func (s *Session) SourcesForImport(path, srcDir string) (*sources.Sources, error) { + importPath, err := s.getImportPath(path, srcDir) + if err != nil { + return nil, err + } + + srcs, ok := s.sources[importPath] + if !ok { + return nil, fmt.Errorf(`sources for %q not found`, path) + } + + return srcs, nil +} + // ImportResolverFor returns a function which returns a compiled package archive // given an import path. func (s *Session) ImportResolverFor(srcDir string) func(string) (*compiler.Archive, error) { @@ -1165,12 +1211,7 @@ func (s *Session) ImportResolverFor(srcDir string) func(string) (*compiler.Archi return archive, nil } - // The archive hasn't been compiled yet so compile it with the sources. - if srcs, ok := s.sources[importPath]; ok { - return s.compilePackages(srcs) - } - - return nil, fmt.Errorf(`sources for %q not found`, importPath) + return nil, fmt.Errorf(`archive for %q not found`, importPath) } } @@ -1258,8 +1299,9 @@ func hasGopathPrefix(file, gopath string) (hasGopathPrefix bool, prefixLen int) func (s *Session) WaitForChange() { // Will need to re-validate up-to-dateness of all archives, so flush them from // memory. 
+ s.importPaths = map[string]map[string]string{} + s.sources = map[string]*sources.Sources{} s.UpToDateArchives = map[string]*compiler.Archive{} - s.Types = map[string]*types.Package{} s.options.PrintSuccess("watching for changes...\n") for { diff --git a/compiler/compiler_test.go b/compiler/compiler_test.go index 0742cbaaa..2c399ad9b 100644 --- a/compiler/compiler_test.go +++ b/compiler/compiler_test.go @@ -679,43 +679,44 @@ func compileProject(t *testing.T, root *packages.Package, minify bool) map[strin pkgMap[pkg.PkgPath] = pkg }) - archiveCache := map[string]*Archive{} - var importContext *ImportContext - importContext = &ImportContext{ - Packages: map[string]*types.Package{}, - ImportArchive: func(path string) (*Archive, error) { - // find in local cache - if a, ok := archiveCache[path]; ok { - return a, nil - } - - pkg, ok := pkgMap[path] - if !ok { - t.Fatal(`package not found:`, path) - } - importContext.Packages[path] = pkg.Types - - srcs := sources.Sources{ - ImportPath: path, - Files: pkg.Syntax, - FileSet: pkg.Fset, - } + allSrcs := map[string]*sources.Sources{} + for _, pkg := range pkgMap { + srcs := &sources.Sources{ + ImportPath: pkg.PkgPath, + Dir: ``, + Files: pkg.Syntax, + FileSet: pkg.Fset, + } + allSrcs[pkg.PkgPath] = srcs + } - // compile package - a, err := Compile(srcs, importContext, minify) - if err != nil { - return nil, err - } - archiveCache[path] = a - return a, nil - }, + importer := func(path, srcDir string) (*sources.Sources, error) { + srcs, ok := allSrcs[path] + if !ok { + t.Fatal(`package not found:`, path) + return nil, nil + } + return srcs, nil } - _, err := importContext.ImportArchive(root.PkgPath) - if err != nil { - t.Fatal(`failed to compile:`, err) + tContext := types.NewContext() + sortedSources := make([]*sources.Sources, 0, len(allSrcs)) + for _, srcs := range allSrcs { + sortedSources = append(sortedSources, srcs) + } + sources.SortedSourcesSlice(sortedSources) + PrepareAllSources(sortedSources, importer, tContext) + 
+ archives := map[string]*Archive{} + for _, srcs := range allSrcs { + a, err := Compile(srcs, tContext, minify) + if err != nil { + t.Fatal(`failed to compile:`, err) + } + archives[srcs.ImportPath] = a } - return archiveCache + + return archives } // newTime creates an arbitrary time.Time offset by the given number of seconds. @@ -730,6 +731,13 @@ func newTime(seconds float64) time.Time { func reloadCompiledProject(t *testing.T, archives map[string]*Archive, rootPkgPath string) map[string]*Archive { t.Helper() + // TODO(grantnelson-wf): The tests using this function are out-of-date + // since they are testing the old archive caching that has been disabled. + // At some point, these tests should be updated to test any new caching + // mechanism that is implemented or removed. As is, this function is faking + // the old recursive archive loading that is no longer used since it + // doesn't allow cross package analysis for generics. + buildTime := newTime(5.0) serialized := map[string][]byte{} for path, a := range archives { @@ -742,6 +750,10 @@ func reloadCompiledProject(t *testing.T, archives map[string]*Archive, rootPkgPa srcModTime := newTime(0.0) reloadCache := map[string]*Archive{} + type ImportContext struct { + Packages map[string]*types.Package + ImportArchive func(path string) (*Archive, error) + } var importContext *ImportContext importContext = &ImportContext{ Packages: map[string]*types.Package{}, diff --git a/compiler/decls.go b/compiler/decls.go index 0694181f6..5b760fb15 100644 --- a/compiler/decls.go +++ b/compiler/decls.go @@ -82,7 +82,7 @@ func (d *Decl) Dce() *dce.Info { // topLevelObjects extracts package-level variables, functions and named types // from the package AST. 
-func (fc *funcContext) topLevelObjects(srcs sources.Sources) (vars []*types.Var, functions []*ast.FuncDecl, typeNames typesutil.TypeNames) { +func (fc *funcContext) topLevelObjects(srcs *sources.Sources) (vars []*types.Var, functions []*ast.FuncDecl, typeNames typesutil.TypeNames) { if !fc.isRoot() { panic(bailout(fmt.Errorf("functionContext.discoverObjects() must be only called on the package-level context"))) } diff --git a/compiler/internal/analysis/info.go b/compiler/internal/analysis/info.go index 803952b24..246547f60 100644 --- a/compiler/internal/analysis/info.go +++ b/compiler/internal/analysis/info.go @@ -58,10 +58,14 @@ type Info struct { funcLitInfos map[*ast.FuncLit][]*FuncInfo InitFuncInfo *FuncInfo // Context for package variable initialization. - isImportedBlocking func(typeparams.Instance) bool // For functions from other packages. - allInfos []*FuncInfo + infoImporter InfoImporter // To get `Info` for other packages. + allInfos []*FuncInfo } +// InfoImporter is used to get the `Info` for another package. +// The path is the resolved import path of the package to get the `Info` for. +type InfoImporter func(path string) (*Info, error) + func (info *Info) newFuncInfo(n ast.Node, obj types.Object, typeArgs typesutil.TypeList, resolver *typeparams.Resolver) *FuncInfo { funcInfo := &FuncInfo{ pkgInfo: info, @@ -132,11 +136,16 @@ func (info *Info) newFuncInfoInstances(fd *ast.FuncDecl) []*FuncInfo { } // IsBlocking returns true if the function may contain blocking calls or operations. -// If inst is from a different package, this will use the isImportedBlocking +// If inst is from a different package, this will use the getImportInfo function // to lookup the information from the other package. 
func (info *Info) IsBlocking(inst typeparams.Instance) bool { if inst.Object.Pkg() != info.Pkg { - return info.isImportedBlocking(inst) + path := inst.Object.Pkg().Path() + otherInfo, err := info.infoImporter(path) + if err != nil { + panic(fmt.Errorf(`failed to get info for package %q: %v`, path, err)) + } + return otherInfo.IsBlocking(inst) } if funInfo := info.FuncInfo(inst); funInfo != nil { return funInfo.IsBlocking() @@ -174,16 +183,21 @@ func (info *Info) VarsWithInitializers() map[*types.Var]bool { return result } -func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typeCtx *types.Context, typesPkg *types.Package, instanceSets *typeparams.PackageInstanceSets, isBlocking func(typeparams.Instance) bool) *Info { +// AnalyzePkg analyzes the given package for blocking calls, defers, etc. +// +// Note that at the end of this call the analysis information +// has NOT been propagated across packages yet. Once all the packages +// have been analyzed, call PropagateAnalysis to propagate the information. 
+func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typeCtx *types.Context, typesPkg *types.Package, instanceSets *typeparams.PackageInstanceSets, infoImporter InfoImporter) *Info { info := &Info{ - Info: typesInfo, - Pkg: typesPkg, - typeCtx: typeCtx, - instanceSets: instanceSets, - HasPointer: make(map[*types.Var]bool), - isImportedBlocking: isBlocking, - funcInstInfos: new(typeparams.InstanceMap[*FuncInfo]), - funcLitInfos: make(map[*ast.FuncLit][]*FuncInfo), + Info: typesInfo, + Pkg: typesPkg, + typeCtx: typeCtx, + instanceSets: instanceSets, + HasPointer: make(map[*types.Var]bool), + infoImporter: infoImporter, + funcInstInfos: new(typeparams.InstanceMap[*FuncInfo]), + funcLitInfos: make(map[*ast.FuncLit][]*FuncInfo), } info.InitFuncInfo = info.newFuncInfo(nil, nil, nil, nil) @@ -193,13 +207,25 @@ func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info ast.Walk(info.InitFuncInfo, file) } + return info +} + +// PropagateAnalysis will propagate analysis information across package +// boundaries to finish the analysis of a whole project. 
+func PropagateAnalysis(allInfo []*Info) { done := false for !done { - done = info.propagateFunctionBlocking() + done = true + for _, info := range allInfo { + if !info.propagateFunctionBlocking() { + done = false + } + } } - info.propagateControlStatementBlocking() - return info + for _, info := range allInfo { + info.propagateControlStatementBlocking() + } } // propagateFunctionBlocking propagates information about blocking calls diff --git a/compiler/internal/analysis/info_test.go b/compiler/internal/analysis/info_test.go index 957e346d9..73428207e 100644 --- a/compiler/internal/analysis/info_test.go +++ b/compiler/internal/analysis/info_test.go @@ -1,6 +1,7 @@ package analysis import ( + "fmt" "go/ast" "go/types" "sort" @@ -1575,15 +1576,13 @@ func TestBlocking_IsImportBlocking_ForwardInstances(t *testing.T) { } func TestBlocking_IsImportBlocking_BackwardInstances(t *testing.T) { - t.Skip(`isImportedBlocking doesn't fully handle instances yet`) - // TODO(grantnelson-wf): This test is currently failing because the info - // for the test package is need while creating the instances for FooBaz - // while analyzing the other package. However the other package is analyzed - // first since the test package is dependent on it. One possible fix is that - // we add some mechanism similar to the localInstCallees but for remote - // instances then perform the blocking propagation steps for all packages - // including the localInstCallees propagation at the same time. After all the - // propagation of the calls then the flow control statements can be marked. + // This tests propagation of information across package boundaries. + // `FooBaz` has no instances in it until it is referenced in the `test` package. + // That instance information needs to propagate back across the package + // boundary to the `other` package. The information for `BazBlocker` and + // `BazNotBlocker` is propagated back to `FooBaz[BazBlocker]` and + // `FooBaz[BazNotBlocker]`. 
That information is then propagated forward + // to the `blocking` and `notBlocking` functions in the `test` package. otherSrc := `package other @@ -1629,8 +1628,9 @@ type blockingTest struct { func newBlockingTest(t *testing.T, src string) *blockingTest { f := srctesting.New(t) + tContext := types.NewContext() tc := typeparams.Collector{ - TContext: types.NewContext(), + TContext: tContext, Info: f.Info, Instances: &typeparams.PackageInstanceSets{}, } @@ -1639,11 +1639,11 @@ func newBlockingTest(t *testing.T, src string) *blockingTest { testInfo, testPkg := f.Check(`pkg/test`, file) tc.Scan(testPkg, file) - isImportBlocking := func(i typeparams.Instance) bool { - t.Fatalf(`isImportBlocking should not be called in this test, called with %v`, i) - return true + getImportInfo := func(path string) (*Info, error) { + return nil, fmt.Errorf(`getImportInfo should not be called in this test, called with %v`, path) } - pkgInfo := AnalyzePkg([]*ast.File{file}, f.FileSet, testInfo, types.NewContext(), testPkg, tc.Instances, isImportBlocking) + pkgInfo := AnalyzePkg([]*ast.File{file}, f.FileSet, testInfo, tContext, testPkg, tc.Instances, getImportInfo) + PropagateAnalysis([]*Info{pkgInfo}) return &blockingTest{ f: f, @@ -1654,19 +1654,19 @@ func newBlockingTest(t *testing.T, src string) *blockingTest { func newBlockingTestWithOtherPackage(t *testing.T, testSrc string, otherSrc string) *blockingTest { f := srctesting.New(t) + tContext := types.NewContext() tc := typeparams.Collector{ - TContext: types.NewContext(), + TContext: tContext, Info: f.Info, Instances: &typeparams.PackageInstanceSets{}, } - pkgInfo := map[*types.Package]*Info{} - isImportBlocking := func(i typeparams.Instance) bool { - if info, ok := pkgInfo[i.Object.Pkg()]; ok { - return info.IsBlocking(i) + pkgInfo := map[string]*Info{} + getImportInfo := func(path string) (*Info, error) { + if info, ok := pkgInfo[path]; ok { + return info, nil } - t.Fatalf(`unexpected package in isImportBlocking for %v`, i) - 
return true + return nil, fmt.Errorf(`unexpected package in getImportInfo for %v`, path) } otherFile := f.Parse(`other.go`, otherSrc) @@ -1677,11 +1677,13 @@ func newBlockingTestWithOtherPackage(t *testing.T, testSrc string, otherSrc stri _, testPkg := f.Check(`pkg/test`, testFile) tc.Scan(testPkg, testFile) - otherPkgInfo := AnalyzePkg([]*ast.File{otherFile}, f.FileSet, f.Info, types.NewContext(), otherPkg, tc.Instances, isImportBlocking) - pkgInfo[otherPkg] = otherPkgInfo + otherPkgInfo := AnalyzePkg([]*ast.File{otherFile}, f.FileSet, f.Info, tContext, otherPkg, tc.Instances, getImportInfo) + pkgInfo[otherPkg.Path()] = otherPkgInfo - testPkgInfo := AnalyzePkg([]*ast.File{testFile}, f.FileSet, f.Info, types.NewContext(), testPkg, tc.Instances, isImportBlocking) - pkgInfo[testPkg] = testPkgInfo + testPkgInfo := AnalyzePkg([]*ast.File{testFile}, f.FileSet, f.Info, tContext, testPkg, tc.Instances, getImportInfo) + pkgInfo[testPkg.Path()] = testPkgInfo + + PropagateAnalysis([]*Info{otherPkgInfo, testPkgInfo}) return &blockingTest{ f: f, diff --git a/compiler/package.go b/compiler/package.go index 5345b5666..cb06c9b1a 100644 --- a/compiler/package.go +++ b/compiler/package.go @@ -15,7 +15,6 @@ import ( "github.com/gopherjs/gopherjs/compiler/sources" "github.com/gopherjs/gopherjs/compiler/typesutil" "github.com/gopherjs/gopherjs/internal/errorList" - "github.com/gopherjs/gopherjs/internal/experiments" ) // pkgContext maintains compiler context for a specific package. @@ -117,18 +116,11 @@ type funcContext struct { funcLitCounter int } -func newRootCtx(tContext *types.Context, srcs sources.Sources, typesInfo *types.Info, typesPkg *types.Package, isBlocking func(typeparams.Instance) bool, minify bool) *funcContext { - tc := typeparams.Collector{ - TContext: tContext, - Info: typesInfo, - Instances: &typeparams.PackageInstanceSets{}, - } - tc.Scan(typesPkg, srcs.Files...) 
- pkgInfo := analysis.AnalyzePkg(srcs.Files, srcs.FileSet, typesInfo, tContext, typesPkg, tc.Instances, isBlocking) +func newRootCtx(tContext *types.Context, srcs *sources.Sources, minify bool) *funcContext { funcCtx := &funcContext{ - FuncInfo: pkgInfo.InitFuncInfo, + FuncInfo: srcs.TypeInfo.InitFuncInfo, pkgCtx: &pkgContext{ - Info: pkgInfo, + Info: srcs.TypeInfo, additionalSelections: make(map[*ast.SelectorExpr]typesutil.Selection), typesCtx: tContext, @@ -138,7 +130,7 @@ func newRootCtx(tContext *types.Context, srcs sources.Sources, typesInfo *types. indentation: 1, minify: minify, fileSet: srcs.FileSet, - instanceSet: tc.Instances, + instanceSet: srcs.Instances, }, allVars: make(map[string]int), flowDatas: map[*types.Label]*flowData{nil: {}}, @@ -158,64 +150,37 @@ type flowData struct { endCase int } -// ImportContext provides access to information about imported packages. -type ImportContext struct { - // Mapping for an absolute import path to the package type information. - Packages map[string]*types.Package - // ImportArchive returns a previously compiled Archive for a dependency - // package. If the Import() call was successful, the corresponding entry - // must be added to the Packages map. - ImportArchive func(importPath string) (*Archive, error) -} - -// isBlocking returns true if an _imported_ function is blocking. It will panic -// if the function decl is not found in the imported package or the package -// hasn't been compiled yet. +// PrepareAllSources prepares all sources for compilation by +// parsing go linknames, type checking, sorting, simplifying, and +// performing cross package analysis. +// The results are stored in the provided sources. // -// Note: see analysis.FuncInfo.Blocking if you need to determine if a function -// in the _current_ package is blocking. Usually available via functionContext -// object. 
-func (ic *ImportContext) isBlocking(inst typeparams.Instance) bool { - f, ok := inst.Object.(*types.Func) - if !ok { - panic(bailout(fmt.Errorf("can't determine if instance %v is blocking: instance isn't for a function object", inst))) - } - - archive, err := ic.ImportArchive(f.Pkg().Path()) - if err != nil { - panic(err) - } - - fullName := funcDeclFullName(inst) - for _, d := range archive.Declarations { - if d.FullName == fullName { - return d.Blocking +// All sources must be given at the same time for cross package analysis to +// work correctly. For consistency, the sources should be sorted by import path. +func PrepareAllSources(allSources []*sources.Sources, importer sources.Importer, tContext *types.Context) error { + // This will be performed recursively for all dependencies so + // most of these prepare calls will be no-ops. + // Since some packages might not be recursively reached via the root source, + // e.g. runtime, we need to try to prepare them all here. + for _, srcs := range allSources { + if err := srcs.Prepare(importer, sizes32, tContext); err != nil { + return err } } - panic(bailout(fmt.Errorf("can't determine if function %s is blocking: decl not found in package archive", fullName))) -} - -// Import implements go/types.Importer interface for ImportContext. -func (ic *ImportContext) Import(path string) (*types.Package, error) { - if path == "unsafe" { - return types.Unsafe, nil - } - // By importing the archive, the package will compile if it hasn't been - // compiled yet and the package will be added to the Packages map. - a, err := ic.ImportArchive(path) - if err != nil { - return nil, err + // Propagate the analysis information to all packages. + allInfo := make([]*analysis.Info, len(allSources)) + for i, src := range allSources { + allInfo[i] = src.TypeInfo } - - return ic.Packages[a.ImportPath], nil + analysis.PropagateAnalysis(allInfo) + return nil } // Compile the provided Go sources as a single package. 
// -// Import path must be the absolute import path for a package. Provided sources -// are always sorted by name to ensure reproducible JavaScript output. -func Compile(srcs sources.Sources, importContext *ImportContext, minify bool) (_ *Archive, err error) { +// Provided sources must be sorted by name to ensure reproducible JavaScript output. +func Compile(srcs *sources.Sources, tContext *types.Context, minify bool) (_ *Archive, err error) { defer func() { e := recover() if e == nil { @@ -231,27 +196,7 @@ func Compile(srcs sources.Sources, importContext *ImportContext, minify bool) (_ err = bailout(fmt.Errorf("unexpected compiler panic while building package %q: %v", srcs.ImportPath, e)) }() - srcs.Sort() - - tContext := types.NewContext() - typesInfo, typesPkg, err := srcs.TypeCheck(importContext, sizes32, tContext) - if err != nil { - return nil, err - } - if genErr := typeparams.RequiresGenericsSupport(typesInfo); genErr != nil && !experiments.Env.Generics { - return nil, fmt.Errorf("package %s requires generics support (https://github.com/gopherjs/gopherjs/issues/1013): %w", srcs.ImportPath, genErr) - } - importContext.Packages[srcs.ImportPath] = typesPkg - - // Extract all go:linkname compiler directives from the package source. 
- goLinknames, err := srcs.ParseGoLinknames() - if err != nil { - return nil, err - } - - srcs = srcs.Simplified(typesInfo) - - rootCtx := newRootCtx(tContext, srcs, typesInfo, typesPkg, importContext.isBlocking, minify) + rootCtx := newRootCtx(tContext, srcs, minify) importedPaths, importDecls := rootCtx.importDecls() @@ -295,13 +240,13 @@ func Compile(srcs sources.Sources, importContext *ImportContext, minify bool) (_ return &Archive{ ImportPath: srcs.ImportPath, - Name: typesPkg.Name(), + Name: srcs.Package.Name(), Imports: importedPaths, - Package: typesPkg, + Package: srcs.Package, Declarations: allDecls, FileSet: srcs.FileSet, Minified: minify, - GoLinknames: goLinknames, + GoLinknames: srcs.GoLinknames, }, nil } diff --git a/compiler/sources/sources.go b/compiler/sources/sources.go index e66fa2243..50d6f5777 100644 --- a/compiler/sources/sources.go +++ b/compiler/sources/sources.go @@ -1,15 +1,19 @@ package sources import ( + "fmt" "go/ast" "go/token" "go/types" "sort" "strings" + "github.com/gopherjs/gopherjs/compiler/internal/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" "github.com/gopherjs/gopherjs/compiler/jsFile" "github.com/gopherjs/gopherjs/compiler/linkname" "github.com/gopherjs/gopherjs/internal/errorList" + "github.com/gopherjs/gopherjs/internal/experiments" "github.com/neelance/astrewrite" ) @@ -38,38 +42,114 @@ type Sources struct { // JSFiles is the JavaScript files that are part of the package. JSFiles []jsFile.JSFile + + // TypeInfo is the type information for this package. + // This is nil until Prepare is called. + TypeInfo *analysis.Info + + // Instances is the type parameters instances for this package. + // This is nil until Prepare is called. + Instances *typeparams.PackageInstanceSets + + // Package is the type-checked package. + // This is nil until Prepare is called. + Package *types.Package + + // GoLinknames is the set of Go linknames for this package. + // This is nil until Prepare is called. 
+ GoLinknames []linkname.GoLinkname +} + +type Importer func(path, srcDir string) (*Sources, error) + +// Prepare recursively processes the provided sources and +// prepares them for compilation by sorting the files by name, +// determining the type information, go linknames, etc. +// +// The importer function is used to import the sources of other packages +// that the package being prepared depends on. If the other sources +// are not prepared when returned by the importer, then that package +// will be prepared before continuing on with the current package. +// This is where the recursive nature of the Prepare function comes in. +// +// Note that at the end of this call the analysis information +// has NOT been propagated across packages yet. +func (s *Sources) Prepare(importer Importer, sizes types.Sizes, tContext *types.Context) error { + // Skip if the sources have already been prepared. + if s.isPrepared() { + return nil + } + + // Sort the files by name to ensure consistent order of processing. + s.sort() + + // Type check the sources to determine the type information. + typesInfo, err := s.typeCheck(importer, sizes, tContext) + if err != nil { + return err + } + + // If generics are not enabled, ensure the package does not require generics support. + if !experiments.Env.Generics { + if genErr := typeparams.RequiresGenericsSupport(typesInfo); genErr != nil { + return fmt.Errorf("package %s requires generics support (https://github.com/gopherjs/gopherjs/issues/1013): %w", s.ImportPath, genErr) + } + } + + // Extract all go:linkname compiler directives from the package source. + err = s.parseGoLinknames() + if err != nil { + return err + } + + // Simplify the source files. + s.simplify(typesInfo) + + // Analyze the package to determine type parameters instances, blocking, + // and other type information. This will not propagate the information. 
+ s.analyze(typesInfo, importer, tContext) + + return nil +} + +// isPrepared returns true if these sources have had Prepare called on them. +// +// This can not determine if the type information has been propagated +// across packages yet, but usually would only be called prior to that. +// For the source to be fully prepared for compilation, the type information +// must be propagated across packages as well. +func (s *Sources) isPrepared() bool { + return s.TypeInfo != nil && s.Package != nil } -// Sort the Go files slice by the original source name to ensure consistent order +// sort the Go files slice by the original source name to ensure consistent order // of processing. This is required for reproducible JavaScript output. // -// Note this function mutates the original slice. -func (s Sources) Sort() Sources { +// Note this function mutates the original Files slice. +func (s *Sources) sort() { sort.Slice(s.Files, func(i, j int) bool { return s.FileSet.File(s.Files[i].Pos()).Name() > s.FileSet.File(s.Files[j].Pos()).Name() }) - return s } -// Simplified returns a new sources instance with each Files entry processed by -// astrewrite.Simplify. The JSFiles are copied unchanged. -func (s Sources) Simplified(typesInfo *types.Info) Sources { - simplified := Sources{ - ImportPath: s.ImportPath, - Dir: s.Dir, - Files: make([]*ast.File, len(s.Files)), - FileSet: s.FileSet, - JSFiles: s.JSFiles, - } +// simplify processes each Files entry with astrewrite.Simplify. +// +// Note this function mutates the original Files slice. +// This must be called after TypeCheck and before analyze since +// this will change the pointers in the AST, for example the pointers +// to function literals will change, making it impossible to find them +// in the type information, if analyze is called first. 
+func (s *Sources) simplify(typesInfo *types.Info) { for i, file := range s.Files { - simplified.Files[i] = astrewrite.Simplify(file, typesInfo, false) + s.Files[i] = astrewrite.Simplify(file, typesInfo, false) } - return simplified } -// TypeCheck the sources. Returns information about declared package types and +// typeCheck the sources. Returns information about declared package types and // type information for the supplied AST. -func (s Sources) TypeCheck(importer types.Importer, sizes types.Sizes, tContext *types.Context) (*types.Info, *types.Package, error) { +// +// This must be called prior to simplify to get the types.Info used by simplify. +func (s *Sources) typeCheck(importer Importer, sizes types.Sizes, tContext *types.Context) (*types.Info, error) { const errLimit = 10 // Max number of type checking errors to return. typesInfo := &types.Info{ @@ -84,11 +164,16 @@ func (s Sources) TypeCheck(importer types.Importer, sizes types.Sizes, tContext var typeErrs errorList.ErrorList - ecImporter := &packageImporter{Importer: importer} + pkgImporter := &packageImporter{ + srcDir: s.Dir, + importer: importer, + sizes: sizes, + tContext: tContext, + } config := &types.Config{ Context: tContext, - Importer: ecImporter, + Importer: pkgImporter, Sizes: sizes, Error: func(err error) { typeErrs = typeErrs.AppendDistinct(err) }, } @@ -96,22 +181,55 @@ func (s Sources) TypeCheck(importer types.Importer, sizes types.Sizes, tContext // If we encountered any import errors, it is likely that the other type errors // are not meaningful and would be resolved by fixing imports. Return them // separately, if any. https://github.com/gopherjs/gopherjs/issues/119. - if ecImporter.Errors.ErrOrNil() != nil { - return nil, nil, ecImporter.Errors.Trim(errLimit).ErrOrNil() + if pkgImporter.Errors.ErrOrNil() != nil { + return nil, pkgImporter.Errors.Trim(errLimit).ErrOrNil() } // Return any other type errors. 
 	if typeErrs.ErrOrNil() != nil {
-		return nil, nil, typeErrs.Trim(errLimit).ErrOrNil()
+		return nil, typeErrs.Trim(errLimit).ErrOrNil()
 	}
 	// Any general errors that may have occurred during type checking.
 	if err != nil {
-		return nil, nil, err
+		return nil, err
 	}
-	return typesInfo, typesPkg, nil
+
+	s.Package = typesPkg
+	return typesInfo, nil
 }
 
-// ParseGoLinknames extracts all //go:linkname compiler directive from the sources.
-func (s Sources) ParseGoLinknames() ([]linkname.GoLinkname, error) {
+// analyze will determine the type parameter instances, blocking,
+// and other type information for the package.
+//
+// This must be called after simplify to ensure the pointers
+// in the AST are still valid.
+//
+// Note that at the end of this call the analysis information
+// has NOT been propagated across packages yet.
+func (s *Sources) analyze(typesInfo *types.Info, importer Importer, tContext *types.Context) {
+	tc := typeparams.Collector{
+		TContext:  tContext,
+		Info:      typesInfo,
+		Instances: &typeparams.PackageInstanceSets{},
+	}
+	tc.Scan(s.Package, s.Files...)
+
+	infoImporter := func(path string) (*analysis.Info, error) {
+		srcs, err := importer(path, s.Dir)
+		if err != nil {
+			return nil, err
+		}
+		return srcs.TypeInfo, nil
+	}
+	anaInfo := analysis.AnalyzePkg(s.Files, s.FileSet, typesInfo, tContext, s.Package, tc.Instances, infoImporter)
+
+	s.TypeInfo = anaInfo
+	s.Instances = tc.Instances
+}
+
+// parseGoLinknames extracts all //go:linkname compiler directives from the sources.
+//
+// This will set the GoLinknames field on the Sources struct.
+func (s *Sources) parseGoLinknames() error {
 	goLinknames := []linkname.GoLinkname{}
 	var errs errorList.ErrorList
 	for _, file := range s.Files {
@@ -119,7 +237,11 @@
 		errs = errs.Append(err)
 		goLinknames = append(goLinknames, found...)
 	}
-	return goLinknames, errs.ErrOrNil()
+	if err := errs.ErrOrNil(); err != nil {
+		return err
+	}
+	s.GoLinknames = goLinknames
+	return nil
 }
 
 // UnresolvedImports calculates the import paths of the package's dependencies
@@ -132,7 +254,7 @@ func (s Sources) ParseGoLinknames() ([]linkname.GoLinkname, error) {
 // The given skip paths (typically those imports from PackageData.Imports)
 // will not be returned in the results.
 // This will not return any `*_test` packages in the results.
-func (s Sources) UnresolvedImports(skip ...string) []string {
+func (s *Sources) UnresolvedImports(skip ...string) []string {
 	seen := make(map[string]struct{})
 	for _, sk := range skip {
 		seen[sk] = struct{}{}
@@ -156,20 +278,40 @@ func (s Sources) UnresolvedImports(skip ...string) []string {
 // packageImporter implements go/types.Importer interface and
 // wraps it to collect import errors.
 type packageImporter struct {
-	Importer types.Importer
+	srcDir   string
+	importer Importer
+	sizes    types.Sizes
+	tContext *types.Context
 
 	Errors errorList.ErrorList
 }
 
-func (ei *packageImporter) Import(path string) (*types.Package, error) {
+func (pi *packageImporter) Import(path string) (*types.Package, error) {
 	if path == "unsafe" {
 		return types.Unsafe, nil
 	}
 
-	pkg, err := ei.Importer.Import(path)
+	srcs, err := pi.importer(path, pi.srcDir)
+	if err != nil {
+		pi.Errors = pi.Errors.AppendDistinct(err)
+		return nil, err
+	}
+
+	// If the source hasn't been prepared yet, prepare it now.
+	// This will recursively prepare all of its dependencies too.
+	// If the source is already prepared, this will be a no-op.
+	err = srcs.Prepare(pi.importer, pi.sizes, pi.tContext)
 	if err != nil {
-		ei.Errors = ei.Errors.AppendDistinct(err)
+		pi.Errors = pi.Errors.AppendDistinct(err)
 		return nil, err
 	}
-	return pkg, nil
+
+	return srcs.Package, nil
+}
+
+// SortedSourcesSlice in place sorts the given slice of Sources by ImportPath.
+// This will not change the order of the files within any Sources.
+func SortedSourcesSlice(sourcesSlice []*Sources) { + sort.Slice(sourcesSlice, func(i, j int) bool { + return sourcesSlice[i].ImportPath < sourcesSlice[j].ImportPath + }) }