[WIP] Extract type checking and analysis from compiler #1363


Closed
wants to merge 23 commits

Commits
0c6d66d  starting migration (grantnelson-wf, Feb 27, 2025)
4c537e7  starting migration (grantnelson-wf, Feb 27, 2025)
6c48e04  moved linkname to its own package (grantnelson-wf, Feb 27, 2025)
f97e44c  Merge branch 'sourceInsteadOfParsedPackage' of github.com:Workiva/gop… (grantnelson-wf, Feb 27, 2025)
fb52990  Working on import context (grantnelson-wf, Feb 27, 2025)
cbf9b58  Working on import context (grantnelson-wf, Feb 28, 2025)
36c8c11  Working on package analysis (grantnelson-wf, Feb 28, 2025)
cd071c1  Moving propagate (grantnelson-wf, Feb 28, 2025)
f5f1968  Merge branch 'sourceInsteadOfParsedPackage' of github.com:Workiva/gop… (grantnelson-wf, Feb 28, 2025)
3def5f3  Reworking preparing and analyzing sources (grantnelson-wf, Feb 28, 2025)
731f220  Trying to debug the funcLit pointer issue (grantnelson-wf, Mar 3, 2025)
ae7afcd  Fixing the funclit pointer problem (grantnelson-wf, Mar 4, 2025)
ccb500b  Fixing the funclit pointer problem (grantnelson-wf, Mar 4, 2025)
a79a574  Fixing the funclit pointer problem (grantnelson-wf, Mar 4, 2025)
471fbf5  Fixing the funclit pointer problem (grantnelson-wf, Mar 4, 2025)
b5ab57e  Fixing the funclit pointer problem (grantnelson-wf, Mar 4, 2025)
04ab0c0  Fixing the funclit pointer problem (grantnelson-wf, Mar 4, 2025)
54e2572  Merge branch 'master' of github.com:gopherjs/gopherjs into breakupCom… (grantnelson-wf, Mar 4, 2025)
35e4013  Fixing remaining bugs (grantnelson-wf, Mar 4, 2025)
a1122c5  Fixing remaining bugs (grantnelson-wf, Mar 4, 2025)
e57c7aa  Some cleanup (grantnelson-wf, Mar 4, 2025)
c3f2ffe  Some cleanup (grantnelson-wf, Mar 5, 2025)
aae9e0a  Some cleanup (grantnelson-wf, Mar 5, 2025)

90 changes: 66 additions & 24 deletions build/build.go
@@ -759,14 +759,13 @@ type Session struct {
// sources is a map of parsed packages that have been built and augmented.
// This is keyed using resolved import paths. This is used to avoid
// rebuilding and augmenting packages that are imported by several packages.
// These sources haven't been sorted nor simplified yet.
// The files in these sources haven't been sorted nor simplified yet.
sources map[string]*sources.Sources

// Binary archives produced during the current session and assumed to be
// up to date with input sources and dependencies. In the -w ("watch") mode
// must be cleared upon entering watching.
UpToDateArchives map[string]*compiler.Archive
Types map[string]*types.Package
Watcher *fsnotify.Watcher
}

@@ -788,7 +787,6 @@ func NewSession(options *Options) (*Session, error) {
return nil, err
}

s.Types = make(map[string]*types.Package)
if options.Watch {
if out, err := exec.Command("ulimit", "-n").Output(); err == nil {
if n, err := strconv.Atoi(strings.TrimSpace(string(out))); err == nil && n < 1024 {
@@ -906,7 +904,7 @@ func (s *Session) BuildFiles(filenames []string, pkgObj string, cwd string) erro
if err != nil {
return err
}
if s.Types["main"].Name() != "main" {
if s.sources["main"].Package.Name() != "main" {
return fmt.Errorf("cannot build/run non-main package")
}
return s.WriteCommandPackage(archive, pkgObj)
@@ -918,7 +916,7 @@ func (s *Session) BuildProject(pkg *PackageData) (*compiler.Archive, error) {
// ensure that runtime for gopherjs is imported
pkg.Imports = append(pkg.Imports, `runtime`)

// Build the project to get the sources for the parsed packages.
// Load the project to get the sources for the parsed packages.
var srcs *sources.Sources
var err error
if pkg.IsTest {
@@ -930,13 +928,35 @@
return nil, err
}

// TODO(grantnelson-wf): At this point we have all the parsed packages we
// need to compile the whole project, including testmain, if needed.
// We can perform analysis on the whole project at this point to propagate
// flatten, blocking, etc. information and check types to get the type info
// with all the instances for all generics in the whole project.
// TODO(grantnelson-wf): We could investigate caching the results of
// the sources prior to preparing them to avoid re-parsing the same
// sources and augmenting them when the files on disk haven't changed.
// This would require a way to determine if the sources are up-to-date,
// which could be done with the leftover srcModTime from when the archives
// were being cached.

return s.compilePackages(srcs)
// Prepare and analyze the source code.
// This will be performed recursively for all dependencies.
tContext := types.NewContext()
allSources := s.getSortedSources()
err = compiler.PrepareAllSources(allSources, s.SourcesForImport, tContext)
if err != nil {
return nil, err
}

// Compile the project into Archives containing the generated JS.
return s.compilePackages(srcs, tContext)
}

// getSortedSources returns the sources sorted by import path.
// The files in the sources may not be sorted yet.
func (s *Session) getSortedSources() []*sources.Sources {
allSources := make([]*sources.Sources, 0, len(s.sources))
for _, srcs := range s.sources {
allSources = append(allSources, srcs)
}
sources.SortedSourcesSlice(allSources)
return allSources
}

func (s *Session) loadTestPackage(pkg *PackageData) (*sources.Sources, error) {
@@ -965,6 +985,7 @@ func (s *Session) loadTestPackage(pkg *PackageData) (*sources.Sources, error) {
Files: []*ast.File{mainFile},
FileSet: fset,
}
s.sources[srcs.ImportPath] = srcs

// Import dependencies for the testmain package.
for _, importedPkgPath := range srcs.UnresolvedImports() {
@@ -1103,16 +1124,27 @@ func (s *Session) loadPackages(pkg *PackageData) (*sources.Sources, error) {
return srcs, nil
}

func (s *Session) compilePackages(srcs *sources.Sources) (*compiler.Archive, error) {
func (s *Session) compilePackages(rootSrcs *sources.Sources, tContext *types.Context) (*compiler.Archive, error) {
for _, srcs := range s.sources {
if _, err := s.compilePackage(srcs, tContext); err != nil {
return nil, err
}
}

rootArchive, ok := s.UpToDateArchives[rootSrcs.ImportPath]
if !ok {
// This check confirms that the root package was in the sources map and got compiled.
return nil, fmt.Errorf(`root package %q not found`, rootSrcs.ImportPath)
}
return rootArchive, nil
}

func (s *Session) compilePackage(srcs *sources.Sources, tContext *types.Context) (*compiler.Archive, error) {
if archive, ok := s.UpToDateArchives[srcs.ImportPath]; ok {
return archive, nil
}

importContext := &compiler.ImportContext{
Packages: s.Types,
ImportArchive: s.ImportResolverFor(srcs.Dir),
}
archive, err := compiler.Compile(*srcs, importContext, s.options.Minify)
archive, err := compiler.Compile(srcs, tContext, s.options.Minify)
if err != nil {
return nil, err
}
@@ -1152,6 +1184,20 @@ func (s *Session) getImportPath(path, srcDir string) (string, error) {
return pkg.ImportPath, nil
}

func (s *Session) SourcesForImport(path, srcDir string) (*sources.Sources, error) {
importPath, err := s.getImportPath(path, srcDir)
if err != nil {
return nil, err
}

srcs, ok := s.sources[importPath]
if !ok {
return nil, fmt.Errorf(`sources for %q not found`, path)
}

return srcs, nil
}

// ImportResolverFor returns a function which returns a compiled package archive
// given an import path.
func (s *Session) ImportResolverFor(srcDir string) func(string) (*compiler.Archive, error) {
@@ -1165,12 +1211,7 @@ func (s *Session) ImportResolverFor(srcDir string) func(string) (*compiler.Archi
return archive, nil
}

// The archive hasn't been compiled yet so compile it with the sources.
if srcs, ok := s.sources[importPath]; ok {
return s.compilePackages(srcs)
}

return nil, fmt.Errorf(`sources for %q not found`, importPath)
return nil, fmt.Errorf(`archive for %q not found`, importPath)
}
}

@@ -1258,8 +1299,9 @@ func hasGopathPrefix(file, gopath string) (hasGopathPrefix bool, prefixLen int)
func (s *Session) WaitForChange() {
// Will need to re-validate up-to-dateness of all archives, so flush them from
// memory.
s.importPaths = map[string]map[string]string{}
s.sources = map[string]*sources.Sources{}
s.UpToDateArchives = map[string]*compiler.Archive{}
s.Types = map[string]*types.Package{}

s.options.PrintSuccess("watching for changes...\n")
for {
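Taken together, the build/build.go changes above replace the old recursive archive-on-demand compilation with a whole-project pipeline: load all sources into the session, sort them, prepare and type check them against a single shared types.Context, and only then compile each package to an archive. The sketch below illustrates how such a driver fits together; it is not code from this PR. The PrepareAllSources, Compile, and SortedSourcesSlice calls mirror the signatures visible in this diff, while the buildAll helper, its parameters, and the import paths are assumptions for illustration.

package example

import (
	"go/types"

	"github.com/gopherjs/gopherjs/compiler"
	"github.com/gopherjs/gopherjs/compiler/sources" // assumed location of the sources package
)

// buildAll is a hypothetical driver for the new pipeline: prepare the whole
// project first, then compile every package against the same type context.
func buildAll(all map[string]*sources.Sources, importer func(path, srcDir string) (*sources.Sources, error), minify bool) (map[string]*compiler.Archive, error) {
	// Sort the sources by import path so preparation is deterministic.
	sorted := make([]*sources.Sources, 0, len(all))
	for _, srcs := range all {
		sorted = append(sorted, srcs)
	}
	sources.SortedSourcesSlice(sorted)

	// Type check and analyze the whole project with one shared context so
	// generic instances can be collected across package boundaries.
	tContext := types.NewContext()
	if err := compiler.PrepareAllSources(sorted, importer, tContext); err != nil {
		return nil, err
	}

	// Only after project-wide preparation is each package compiled to JS.
	archives := make(map[string]*compiler.Archive, len(all))
	for _, srcs := range sorted {
		archive, err := compiler.Compile(srcs, tContext, minify)
		if err != nil {
			return nil, err
		}
		archives[srcs.ImportPath] = archive
	}
	return archives, nil
}

This mirrors what Session.BuildProject now does: Session.SourcesForImport plays the role of the importer callback, and compilePackages performs the final loop.
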
78 changes: 45 additions & 33 deletions compiler/compiler_test.go
@@ -679,43 +679,44 @@ func compileProject(t *testing.T, root *packages.Package, minify bool) map[strin
pkgMap[pkg.PkgPath] = pkg
})

archiveCache := map[string]*Archive{}
var importContext *ImportContext
importContext = &ImportContext{
Packages: map[string]*types.Package{},
ImportArchive: func(path string) (*Archive, error) {
// find in local cache
if a, ok := archiveCache[path]; ok {
return a, nil
}

pkg, ok := pkgMap[path]
if !ok {
t.Fatal(`package not found:`, path)
}
importContext.Packages[path] = pkg.Types

srcs := sources.Sources{
ImportPath: path,
Files: pkg.Syntax,
FileSet: pkg.Fset,
}
allSrcs := map[string]*sources.Sources{}
for _, pkg := range pkgMap {
srcs := &sources.Sources{
ImportPath: pkg.PkgPath,
Dir: ``,
Files: pkg.Syntax,
FileSet: pkg.Fset,
}
allSrcs[pkg.PkgPath] = srcs
}

// compile package
a, err := Compile(srcs, importContext, minify)
if err != nil {
return nil, err
}
archiveCache[path] = a
return a, nil
},
importer := func(path, srcDir string) (*sources.Sources, error) {
srcs, ok := allSrcs[path]
if !ok {
t.Fatal(`package not found:`, path)
return nil, nil
}
return srcs, nil
}

_, err := importContext.ImportArchive(root.PkgPath)
if err != nil {
t.Fatal(`failed to compile:`, err)
tContext := types.NewContext()
sortedSources := make([]*sources.Sources, 0, len(allSrcs))
for _, srcs := range allSrcs {
sortedSources = append(sortedSources, srcs)
}
sources.SortedSourcesSlice(sortedSources)
PrepareAllSources(sortedSources, importer, tContext)

archives := map[string]*Archive{}
for _, srcs := range allSrcs {
a, err := Compile(srcs, tContext, minify)
if err != nil {
t.Fatal(`failed to compile:`, err)
}
archives[srcs.ImportPath] = a
}
return archiveCache

return archives
}

// newTime creates an arbitrary time.Time offset by the given number of seconds.
@@ -730,6 +731,13 @@ func newTime(seconds float64) time.Time {
func reloadCompiledProject(t *testing.T, archives map[string]*Archive, rootPkgPath string) map[string]*Archive {
t.Helper()

// TODO(grantnelson-wf): The tests using this function are out-of-date
// since they are testing the old archive caching that has been disabled.
// At some point, these tests should be updated to test any new caching
// mechanism that is implemented or removed. As is, this function is faking
// the old recursive archive loading that is no longer used, since it
// doesn't allow cross-package analysis for generics.

buildTime := newTime(5.0)
serialized := map[string][]byte{}
for path, a := range archives {
Expand All @@ -742,6 +750,10 @@ func reloadCompiledProject(t *testing.T, archives map[string]*Archive, rootPkgPa

srcModTime := newTime(0.0)
reloadCache := map[string]*Archive{}
type ImportContext struct {
Packages map[string]*types.Package
ImportArchive func(path string) (*Archive, error)
}
var importContext *ImportContext
importContext = &ImportContext{
Packages: map[string]*types.Package{},
2 changes: 1 addition & 1 deletion compiler/decls.go
@@ -82,7 +82,7 @@ func (d *Decl) Dce() *dce.Info {

// topLevelObjects extracts package-level variables, functions and named types
// from the package AST.
func (fc *funcContext) topLevelObjects(srcs sources.Sources) (vars []*types.Var, functions []*ast.FuncDecl, typeNames typesutil.TypeNames) {
func (fc *funcContext) topLevelObjects(srcs *sources.Sources) (vars []*types.Var, functions []*ast.FuncDecl, typeNames typesutil.TypeNames) {
if !fc.isRoot() {
panic(bailout(fmt.Errorf("functionContext.discoverObjects() must be only called on the package-level context")))
}
58 changes: 42 additions & 16 deletions compiler/internal/analysis/info.go
@@ -58,10 +58,14 @@ type Info struct {
funcLitInfos map[*ast.FuncLit][]*FuncInfo
InitFuncInfo *FuncInfo // Context for package variable initialization.

isImportedBlocking func(typeparams.Instance) bool // For functions from other packages.
allInfos []*FuncInfo
infoImporter InfoImporter // To get `Info` for other packages.
allInfos []*FuncInfo
}

// InfoImporter is used to get the `Info` for another package.
// The path is the resolved import path of the package to get the `Info` for.
type InfoImporter func(path string) (*Info, error)

func (info *Info) newFuncInfo(n ast.Node, obj types.Object, typeArgs typesutil.TypeList, resolver *typeparams.Resolver) *FuncInfo {
funcInfo := &FuncInfo{
pkgInfo: info,
@@ -132,11 +136,16 @@ func (info *Info) newFuncInfoInstances(fd *ast.FuncDecl) []*FuncInfo {
}

// IsBlocking returns true if the function may contain blocking calls or operations.
// If inst is from a different package, this will use the isImportedBlocking
// If inst is from a different package, this will use the getImportInfo function
// to lookup the information from the other package.
func (info *Info) IsBlocking(inst typeparams.Instance) bool {
if inst.Object.Pkg() != info.Pkg {
return info.isImportedBlocking(inst)
path := inst.Object.Pkg().Path()
otherInfo, err := info.infoImporter(path)
if err != nil {
panic(fmt.Errorf(`failed to get info for package %q: %v`, path, err))
}
return otherInfo.IsBlocking(inst)
}
if funInfo := info.FuncInfo(inst); funInfo != nil {
return funInfo.IsBlocking()
@@ -174,16 +183,21 @@ func (info *Info) VarsWithInitializers() map[*types.Var]bool {
return result
}

func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typeCtx *types.Context, typesPkg *types.Package, instanceSets *typeparams.PackageInstanceSets, isBlocking func(typeparams.Instance) bool) *Info {
// AnalyzePkg analyzes the given package for blocking calls, defers, etc.
//
// Note that at the end of this call the analysis information
// has NOT been propagated across packages yet. Once all the packages
// have been analyzed, call PropagateAnalysis to propagate the information.
func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typeCtx *types.Context, typesPkg *types.Package, instanceSets *typeparams.PackageInstanceSets, infoImporter InfoImporter) *Info {
info := &Info{
Info: typesInfo,
Pkg: typesPkg,
typeCtx: typeCtx,
instanceSets: instanceSets,
HasPointer: make(map[*types.Var]bool),
isImportedBlocking: isBlocking,
funcInstInfos: new(typeparams.InstanceMap[*FuncInfo]),
funcLitInfos: make(map[*ast.FuncLit][]*FuncInfo),
Info: typesInfo,
Pkg: typesPkg,
typeCtx: typeCtx,
instanceSets: instanceSets,
HasPointer: make(map[*types.Var]bool),
infoImporter: infoImporter,
funcInstInfos: new(typeparams.InstanceMap[*FuncInfo]),
funcLitInfos: make(map[*ast.FuncLit][]*FuncInfo),
}
info.InitFuncInfo = info.newFuncInfo(nil, nil, nil, nil)

@@ -193,13 +207,25 @@ func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info
ast.Walk(info.InitFuncInfo, file)
}

return info
}

// PropagateAnalysis will propagate analysis information across package
// boundaries to finish the analysis of a whole project.
func PropagateAnalysis(allInfo []*Info) {
done := false
for !done {
done = info.propagateFunctionBlocking()
done = true
for _, info := range allInfo {
if !info.propagateFunctionBlocking() {
done = false
}
}
}

info.propagateControlStatementBlocking()
return info
for _, info := range allInfo {
info.propagateControlStatementBlocking()
}
}

// propagateFunctionBlocking propagates information about blocking calls
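The compiler/internal/analysis changes above split analysis into two phases: AnalyzePkg now only records per-package information, and the new PropagateAnalysis call iterates the whole project to a fixed point, resolving cross-package blocking through the InfoImporter callback. The sketch below is an illustrative wiring of those two phases, not code from this PR; AnalyzePkg, PropagateAnalysis, and InfoImporter match the signatures in this diff, while the loadedPkg struct, the analyzeProject helper, and the import paths are assumptions (these are internal packages, so such code would live inside the compiler module).

package example

import (
	"fmt"
	"go/ast"
	"go/token"
	"go/types"

	// Assumed locations of the internal packages referenced by this diff.
	"github.com/gopherjs/gopherjs/compiler/internal/analysis"
	"github.com/gopherjs/gopherjs/compiler/internal/typeparams"
)

// loadedPkg bundles the per-package inputs that AnalyzePkg expects; it is a
// stand-in for whatever the compiler's prepared sources actually provide.
type loadedPkg struct {
	files        []*ast.File
	fset         *token.FileSet
	typesInfo    *types.Info
	typesPkg     *types.Package
	instanceSets *typeparams.PackageInstanceSets
}

// analyzeProject shows the two-phase analysis: per-package AnalyzePkg calls
// followed by one project-wide PropagateAnalysis pass.
func analyzeProject(pkgs []*loadedPkg, tContext *types.Context) map[string]*analysis.Info {
	infoByPath := map[string]*analysis.Info{}

	// The InfoImporter lets one package's Info look up another package's
	// Info by resolved import path (used by Info.IsBlocking).
	importer := func(path string) (*analysis.Info, error) {
		info, ok := infoByPath[path]
		if !ok {
			return nil, fmt.Errorf("analysis info for %q not found", path)
		}
		return info, nil
	}

	// Phase 1: analyze each package in isolation; nothing has crossed
	// package boundaries yet.
	allInfo := make([]*analysis.Info, 0, len(pkgs))
	for _, p := range pkgs {
		info := analysis.AnalyzePkg(p.files, p.fset, p.typesInfo, tContext, p.typesPkg, p.instanceSets, importer)
		infoByPath[p.typesPkg.Path()] = info
		allInfo = append(allInfo, info)
	}

	// Phase 2: propagate blocking information across packages to a fixed
	// point, then finalize control-statement blocking per package.
	analysis.PropagateAnalysis(allInfo)
	return infoByPath
}

In the PR itself this wiring is presumably performed by compiler.PrepareAllSources, which is why build.Session no longer needs its own Types map or per-import archive recursion.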