diff --git a/.github/actions/setup-gopherjs/action.yml b/.github/actions/setup-gopherjs/action.yml new file mode 100644 index 000000000..7bd1b7ab8 --- /dev/null +++ b/.github/actions/setup-gopherjs/action.yml @@ -0,0 +1,74 @@ +name: Setup GopherJS +description: Sets up Go, Node.js, and GopherJS + +inputs: + includeSyscall: + description: Indicates that the node-syscall package should be installed. + required: true + default: 'false' + + fixTemps: + description: Indicates that the Windows Temp variables should be fixed. + required: true + default: 'false' + +runs: + using: composite + steps: + - name: Fix Windows Temp Variables + if: inputs.fixTemps == 'true' + shell: pwsh + run: | + # see https://github.com/actions/runner-images/issues/712#issuecomment-613004302 + echo "TEMP=$env:USERPROFILE\AppData\Local\Temp" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 + echo "TMP=$env:USERPROFILE\AppData\Local\Temp" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 + echo "TMPDIR=$env:USERPROFILE\AppData\Local\Temp" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Setup Go Environment + working-directory: ${{ env.GOPHERJS_PATH }} + shell: bash + run: echo "GOROOT=$(go env GOROOT)" >> $GITHUB_ENV + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + + - name: Install Node.js for non-Linux + if: inputs.includeSyscall != 'true' + working-directory: ${{ env.GOPHERJS_PATH }} + shell: bash + # Install required Node.js packages without optional (node-syscall). + run: npm install --omit=optional --no-package-lock + + - name: Install Node.js for Linux + if: inputs.includeSyscall == 'true' + working-directory: ${{ env.GOPHERJS_PATH }} + shell: bash + # Install required Node.js packages including optional (node-syscall). + run: | + npm install --include=optional --no-package-lock + + - name: Setup Node.js Environment + working-directory: ${{ env.GOPHERJS_PATH }} + shell: bash + # Make nodejs able to require installed modules from any working path. + run: echo "NODE_PATH=$(npm root)" >> $GITHUB_ENV + + - name: Install GopherJS + working-directory: ${{ env.GOPHERJS_PATH }} + shell: bash + run: go install -v + + - name: Setup information + shell: bash + run: | + echo ::notice::go version: $(go version) + echo ::notice::node version: $(node -v) + echo ::notice::npm version: $(npm -v) + echo ::notice::gopherjs version: $(gopherjs version) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 000000000..07ff3844a --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,223 @@ +name: CI + +on: + push: + branches: [ "*" ] + pull_request: + branches: [ "*" ] + +permissions: + contents: read + +concurrency: + group: ci-${{ github.ref }} + cancel-in-progress: true + +env: + GO_VERSION: 1.19.13 + NODE_VERSION: 18 + GOLANGCI_VERSION: v1.53.3 + GOPHERJS_EXPERIMENT: generics + SOURCE_MAP_SUPPORT: true + GOPATH: ${{ github.workspace }}/go + GOPHERJS_PATH: ${{ github.workspace }}/go/src/github.com/${{ github.repository }} + +jobs: + ubuntu_smoke: + name: Ubuntu Smoke + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + path: ${{ env.GOPHERJS_PATH }} + - name: Copy Actions + run: cp -r ${{ env.GOPHERJS_PATH }}/.github . 
+      - name: Setup GopherJS
+        uses: ./.github/actions/setup-gopherjs/
+        with:
+          includeSyscall: 'true'
+      - name: Test GopherJS
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: go test -v -short ./...
+      - name: Run Tests
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: |
+          gopherjs build -v net/http
+          gopherjs test -v --short fmt log ./tests
+
+  windows_smoke:
+    name: Windows Smoke
+    runs-on: windows-latest
+    env:
+      # Windows does not support source maps.
+      SOURCE_MAP_SUPPORT: false
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ env.GOPHERJS_PATH }}
+      - name: Copy Actions
+        run: cp -r ${{ env.GOPHERJS_PATH }}/.github .
+      - name: Setup GopherJS
+        uses: ./.github/actions/setup-gopherjs/
+        with:
+          fixTemps: 'true'
+      - name: Test GopherJS
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: go test -v -short ./...
+      - name: Run Tests
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: |
+          gopherjs build -v net/http
+          gopherjs test -v --short fmt sort ./tests
+
+  darwin_smoke:
+    name: Darwin Smoke
+    runs-on: macos-latest
+    env:
+      # The default Node version is not available for darwin, so use a newer one.
+      NODE_VERSION: 20
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ env.GOPHERJS_PATH }}
+      - name: Copy Actions
+        run: cp -r ${{ env.GOPHERJS_PATH }}/.github .
+      - name: Setup GopherJS
+        uses: ./.github/actions/setup-gopherjs/
+      - name: Test GopherJS
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: go test -v -short ./...
+      - name: Run Tests
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: |
+          gopherjs build -v net/http
+          gopherjs test -v --short fmt log os ./tests
+
+  lint:
+    name: Lint Checks
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ env.GOPHERJS_PATH }}
+      - uses: actions/setup-go@v5
+        with:
+          go-version: ${{ env.GO_VERSION }}
+      - name: Install golangci-lint
+        uses: golangci/golangci-lint-action@v3
+        with:
+          working-directory: ${{ env.GOPHERJS_PATH }}
+          version: ${{ env.GOLANGCI_VERSION }}
+          only-new-issues: true
+      - name: Check go.mod
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        run: go mod tidy && git diff --exit-code
+      - name: Check natives build tags
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        # All those packages should have // +build js.
+        run: diff -u <(echo -n) <(go list ./compiler/natives/src/...)
+
+  go_tests:
+    name: Go Tests
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ env.GOPHERJS_PATH }}
+      - name: Copy Actions
+        run: cp -r ${{ env.GOPHERJS_PATH }}/.github .
+      - name: Setup GopherJS
+        uses: ./.github/actions/setup-gopherjs/
+        with:
+          includeSyscall: 'true'
+      - name: Run Tests
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        # Run all tests except gorepo tests.
+        run: go test -v -race $(go list ./... | grep -v github.com/gopherjs/gopherjs/tests/gorepo)
+
+  todomvc_check:
+    name: TodoMVC GO111MODULE Check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ env.GOPHERJS_PATH }}
+      - name: Copy Actions
+        run: cp -r ${{ env.GOPHERJS_PATH }}/.github .
+      - name: Setup GopherJS
+        uses: ./.github/actions/setup-gopherjs/
+      - name: TodoMVC in GOPATH mode
+        working-directory: ${{ env.GOPHERJS_PATH }}
+        env:
+          GO111MODULE: off
+          GOPATH: /tmp/gopath
+        run: |
+          mkdir -p $GOPATH/src/github.com/gopherjs/gopherjs
+          cp -r -p ${{ env.GOPHERJS_PATH }}/. $GOPATH/src/github.com/gopherjs/gopherjs/
+          go get -v github.com/gopherjs/todomvc
+          gopherjs build -v -o /tmp/todomvc_gopath.js github.com/gopherjs/todomvc
+          gopherjs test -v github.com/gopherjs/todomvc/...
+ find $GOPATH + - name: TodoMVC in Go Modules mode + env: + GO111MODULE: on + GOPATH: /tmp/gmod + run: | + mkdir -p $GOPATH/src + cd /tmp + git clone --depth=1 https://github.com/gopherjs/todomvc.git + cd /tmp/todomvc + gopherjs build -v -o /tmp/todomvc_gomod.js github.com/gopherjs/todomvc + gopherjs test -v github.com/gopherjs/todomvc/... + find $GOPATH + - name: Compare GOPATH and Go Modules output + run: | + diff -u \ + <(sed 's/todomvc_gomod.js.map/todomvc_ignored.js.map/' /tmp/todomvc_gomod.js) \ + <(sed 's/todomvc_gopath.js.map/todomvc_ignored.js.map/' /tmp/todomvc_gopath.js) + + gopherjs_tests: + name: GopherJS Tests (${{ matrix.filter.name }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + filter: + - name: non-crypto + pattern: '-Pve "^crypto"' + - name: cypto + pattern: '-Pe "^crypto"' + steps: + - uses: actions/checkout@v4 + with: + path: ${{ env.GOPHERJS_PATH }} + - name: Copy Actions + run: cp -r ${{ env.GOPHERJS_PATH }}/.github . + - name: Setup GopherJS + uses: ./.github/actions/setup-gopherjs/ + - name: Run GopherJS tests + working-directory: ${{ env.GOPHERJS_PATH }} + run: | + PACKAGE_NAMES=$( \ + GOOS=js GOARCH=wasm go list std github.com/gopherjs/gopherjs/js/... github.com/gopherjs/gopherjs/tests/... \ + | grep -v -x -f .std_test_pkg_exclusions \ + | grep ${{ matrix.filter.pattern }} \ + ) + echo "Running tests for packages:" + echo "$PACKAGE_NAMES" + gopherjs test -p 4 --minify -v --short $PACKAGE_NAMES + + gorepo_tests: + name: Gorepo Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + path: ${{ env.GOPHERJS_PATH }} + - name: Copy Actions + run: cp -r ${{ env.GOPHERJS_PATH }}/.github . + - name: Setup GopherJS + uses: ./.github/actions/setup-gopherjs/ + - name: Run GopherJS tests + working-directory: ${{ env.GOPHERJS_PATH }} + run: go test -v github.com/gopherjs/gopherjs/tests/gorepo diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml deleted file mode 100644 index 03fa75d9c..000000000 --- a/.github/workflows/lint.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: golangci-lint -on: - pull_request: -permissions: - contents: read -jobs: - golangci: - name: lint - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - uses: actions/setup-go@v3 - with: - go-version: "1.19.13" - - - name: golangci-lint - uses: golangci/golangci-lint-action@v3 - with: - version: v1.53.3 - only-new-issues: true - - - name: Check go.mod - run: | - go mod tidy && git diff --exit-code diff --git a/.github/workflows/measure-size.yml b/.github/workflows/measure-size.yml index ee4024e6a..1697b1127 100644 --- a/.github/workflows/measure-size.yml +++ b/.github/workflows/measure-size.yml @@ -2,16 +2,19 @@ name: Measure canonical app size on: ['pull_request'] +env: + GO_VERSION: '~1.19.13' + jobs: measure: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v5 with: - go-version: '~1.19.13' + go-version: ${{ env.GO_VERSION }} - uses: gopherjs/output-size-action/measure@main with: name: jQuery TodoMVC @@ -19,10 +22,9 @@ jobs: go-package: github.com/gopherjs/todomvc report_json: /tmp/report.json report_md: /tmp/report.md - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: name: size_report path: | /tmp/report.json /tmp/report.md - diff --git a/README.md b/README.md index 3804dbf0f..016d41bf3 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ 
[![GoDoc](https://godoc.org/github.com/gopherjs/gopherjs/js?status.svg)](https://godoc.org/github.com/gopherjs/gopherjs/js) [![Sourcegraph](https://sourcegraph.com/github.com/gopherjs/gopherjs/-/badge.svg)](https://sourcegraph.com/github.com/gopherjs/gopherjs?badge) -[![Circle CI](https://circleci.com/gh/gopherjs/gopherjs.svg?style=svg)](https://circleci.com/gh/gopherjs/gopherjs) +[![Github Actions CI](https://github.com/gopherjs/gopherjs/actions/workflows/ci.yaml/badge.svg)](https://github.com/gopherjs/gopherjs/actions/workflows/ci.yaml) GopherJS compiles Go code ([go.dev](https://go.dev/)) to pure JavaScript code. Its main purpose is to give you the opportunity to write front-end code in Go which will still run in all browsers. @@ -15,6 +15,7 @@ GopherJS compiles Go code ([go.dev](https://go.dev/)) to pure JavaScript code. I ### What's new? +- 2024-02-24: Go 1.19 support is [available](https://github.com/gopherjs/gopherjs/releases/tag/v1.19.0-beta1)! - 2022-08-18: Go 1.18 support is [available](https://github.com/gopherjs/gopherjs/releases/tag/v1.18.0-beta2%2Bgo1.18.5)! - 2021-09-19: Go 1.17 support is available! - 2021-08-23: Go Modules are now fully supported. @@ -56,11 +57,7 @@ _Note: GopherJS will try to write compiled object files of the core packages to #### gopherjs run, gopherjs test -If you want to use `gopherjs run` or `gopherjs test` to run the generated code locally, install Node.js 10.0.0 (or newer), and the `source-map-support` module: - -``` -npm install --global source-map-support -``` +If you want to use `gopherjs run` or `gopherjs test` to run the generated code locally, install Node.js 18 (or newer). On supported `GOOS` platforms, it's possible to make system calls (file system access, etc.) available. See [doc/syscalls.md](https://github.com/gopherjs/gopherjs/blob/master/doc/syscalls.md) for instructions on how to do so. diff --git a/build/build.go b/build/build.go index def9cd313..46786a30b 100644 --- a/build/build.go +++ b/build/build.go @@ -21,17 +21,20 @@ import ( "sort" "strconv" "strings" + "sync" "time" "github.com/fsnotify/fsnotify" "github.com/gopherjs/gopherjs/compiler" "github.com/gopherjs/gopherjs/compiler/astutil" + "github.com/gopherjs/gopherjs/compiler/jsFile" + "github.com/gopherjs/gopherjs/compiler/sources" + "github.com/gopherjs/gopherjs/internal/errorList" + "github.com/gopherjs/gopherjs/internal/testmain" log "github.com/sirupsen/logrus" "github.com/neelance/sourcemap" "golang.org/x/tools/go/buildutil" - - "github.com/gopherjs/gopherjs/build/cache" ) // DefaultGOROOT is the default GOROOT value for builds. @@ -163,7 +166,7 @@ type overrideInfo struct { // - Otherwise for identifiers that exist in the original and the overrides, // the original is removed. // - New identifiers that don't exist in original package get added. 
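The augmentation rules above (an overlay declaration replaces its original counterpart; overlay-only declarations are appended) can be pictured as a merge over plain maps. This is only an illustrative sketch, not the compiler's actual data structures, and every name in it is invented for the example:

```go
package main

import "fmt"

// mergeDecls mimics the replace-or-add rule on toy string "declarations";
// the real parseAndAugment operates on *ast.File declarations instead.
func mergeDecls(original, overlay map[string]string) map[string]string {
	merged := make(map[string]string, len(original)+len(overlay))
	for name, decl := range original {
		merged[name] = decl // keep originals that have no override
	}
	for name, decl := range overlay {
		merged[name] = decl // an override replaces the original, or adds a new identifier
	}
	return merged
}

func main() {
	original := map[string]string{"Open": "original Open", "Close": "original Close"}
	overlay := map[string]string{"Open": "native Open", "Poll": "native Poll"}
	fmt.Println(mergeDecls(original, overlay)) // Close kept, Open replaced, Poll added
}
```

The same precedence applies per identifier in parseAndAugment below, just on AST declarations rather than strings.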
-func parseAndAugment(xctx XContext, pkg *PackageData, isTest bool, fileSet *token.FileSet) ([]*ast.File, []JSFile, error) { +func parseAndAugment(xctx XContext, pkg *PackageData, isTest bool, fileSet *token.FileSet) ([]*ast.File, []jsFile.JSFile, error) { jsFiles, overlayFiles := parseOverlayFiles(xctx, pkg, isTest, fileSet) originalFiles, err := parserOriginalFiles(pkg, fileSet) @@ -174,7 +177,6 @@ func parseAndAugment(xctx XContext, pkg *PackageData, isTest bool, fileSet *toke overrides := make(map[string]overrideInfo) for _, file := range overlayFiles { augmentOverlayFile(file, overrides) - pruneImports(file) } delete(overrides, "init") @@ -193,7 +195,7 @@ func parseAndAugment(xctx XContext, pkg *PackageData, isTest bool, fileSet *toke // parseOverlayFiles loads and parses overlay files // to augment the original files with. -func parseOverlayFiles(xctx XContext, pkg *PackageData, isTest bool, fileSet *token.FileSet) ([]JSFile, []*ast.File) { +func parseOverlayFiles(xctx XContext, pkg *PackageData, isTest bool, fileSet *token.FileSet) ([]jsFile.JSFile, []*ast.File) { isXTest := strings.HasSuffix(pkg.ImportPath, "_test") importPath := pkg.ImportPath if isXTest { @@ -239,7 +241,7 @@ func parseOverlayFiles(xctx XContext, pkg *PackageData, isTest bool, fileSet *to // parserOriginalFiles loads and parses the original files to augment. func parserOriginalFiles(pkg *PackageData, fileSet *token.FileSet) ([]*ast.File, error) { var files []*ast.File - var errList compiler.ErrorList + var errList errorList.ErrorList for _, name := range pkg.GoFiles { if !filepath.IsAbs(name) { // name might be absolute if specified directly. E.g., `gopherjs build /abs/file.go`. name = filepath.Join(pkg.Dir, name) @@ -623,18 +625,11 @@ func (o *Options) PrintSuccess(format string, a ...interface{}) { fmt.Fprintf(os.Stderr, format, a...) } -// JSFile represents a *.inc.js file metadata and content. -type JSFile struct { - Path string // Full file path for the build context the file came from. - ModTime time.Time - Content []byte -} - // PackageData is an extension of go/build.Package with additional metadata // GopherJS requires. type PackageData struct { *build.Package - JSFiles []JSFile + JSFiles []jsFile.JSFile // IsTest is true if the package is being built for running tests. IsTest bool SrcModTime time.Time @@ -749,15 +744,28 @@ func (p *PackageData) InstallPath() string { // This is the main interface to GopherJS build system. Session lifetime is // roughly equivalent to a single GopherJS tool invocation. type Session struct { - options *Options - xctx XContext - buildCache cache.BuildCache + options *Options + xctx XContext + + // importPaths is a map of the resolved import paths given the + // source directory (first key) and the unresolved import path (second key). + // This is used to cache the resolved import returned from XContext.Import. + // XContent.Import can be slow, so we cache the resolved path that is used + // as the map key by parsedPackages and UpToDateArchives. + // This makes subsequent lookups faster during compilation when all we have + // is the unresolved import path and source directory. + importPaths map[string]map[string]string + + // sources is a map of parsed packages that have been built and augmented. + // This is keyed using resolved import paths. This is used to avoid + // rebuilding and augmenting packages that are imported by several packages. + // The files in these sources haven't been sorted nor simplified yet. 
+ sources map[string]*sources.Sources // Binary archives produced during the current session and assumed to be // up to date with input sources and dependencies. In the -w ("watch") mode // must be cleared upon entering watching. UpToDateArchives map[string]*compiler.Archive - Types map[string]*types.Package Watcher *fsnotify.Watcher } @@ -767,6 +775,8 @@ func NewSession(options *Options) (*Session, error) { s := &Session{ options: options, + importPaths: make(map[string]map[string]string), + sources: make(map[string]*sources.Sources), UpToDateArchives: make(map[string]*compiler.Archive), } s.xctx = NewBuildContext(s.InstallSuffix(), s.options.BuildTags) @@ -777,16 +787,6 @@ func NewSession(options *Options) (*Session, error) { return nil, err } - s.buildCache = cache.BuildCache{ - GOOS: env.GOOS, - GOARCH: env.GOARCH, - GOROOT: env.GOROOT, - GOPATH: env.GOPATH, - BuildTags: append([]string{}, env.BuildTags...), - Minify: options.Minify, - TestedPackage: options.TestedPackage, - } - s.Types = make(map[string]*types.Package) if options.Watch { if out, err := exec.Command("ulimit", "-n").Output(); err == nil { if n, err := strconv.Atoi(strings.TrimSpace(string(out))); err == nil && n < 1024 { @@ -893,36 +893,107 @@ func (s *Session) BuildFiles(filenames []string, pkgObj string, cwd string) erro if err != nil { return fmt.Errorf("failed to stat %s: %w", file, err) } - pkg.JSFiles = append(pkg.JSFiles, JSFile{ + pkg.JSFiles = append(pkg.JSFiles, jsFile.JSFile{ Path: filepath.Join(pkg.Dir, filepath.Base(file)), ModTime: info.ModTime(), Content: content, }) } - archive, err := s.BuildPackage(pkg) + archive, err := s.BuildProject(pkg) if err != nil { return err } - if s.Types["main"].Name() != "main" { + if s.sources["main"].Package.Name() != "main" { return fmt.Errorf("cannot build/run non-main package") } return s.WriteCommandPackage(archive, pkgObj) } -// BuildImportPath loads and compiles package with the given import path. -// -// Relative paths are interpreted relative to the current working dir. -func (s *Session) BuildImportPath(path string) (*compiler.Archive, error) { - _, archive, err := s.buildImportPathWithSrcDir(path, "") - return archive, err +// BuildProject builds a command project (one with a main method) or +// builds a test project (one with a synthesized test main package). +func (s *Session) BuildProject(pkg *PackageData) (*compiler.Archive, error) { + // ensure that runtime for gopherjs is imported + pkg.Imports = append(pkg.Imports, `runtime`) + + // Load the project to get the sources for the parsed packages. + var rootSrcs *sources.Sources + var err error + if pkg.IsTest { + rootSrcs, err = s.loadTestPackage(pkg) + } else { + rootSrcs, err = s.loadPackages(pkg) + } + if err != nil { + return nil, err + } + + // TODO(grantnelson-wf): We could investigate caching the results of + // the sources prior to preparing them to avoid re-parsing the same + // sources and augmenting them when the files on disk haven't changed. + // This would require a way to determine if the sources are up-to-date + // which could be done with the left over srcModTime from when the archives + // were being cached. + + // Compile the project into Archives containing the generated JS. + return s.prepareAndCompilePackages(rootSrcs) } -// buildImportPathWithSrcDir builds the package specified by the import path. +// getSortedSources returns the sources sorted by import path. +// The files in the sources may still not be sorted yet. 
+func (s *Session) getSortedSources() []*sources.Sources {
+	allSources := make([]*sources.Sources, 0, len(s.sources))
+	for _, srcs := range s.sources {
+		allSources = append(allSources, srcs)
+	}
+	sources.SortedSourcesSlice(allSources)
+	return allSources
+}
+
+func (s *Session) loadTestPackage(pkg *PackageData) (*sources.Sources, error) {
+	_, err := s.loadPackages(pkg.TestPackage())
+	if err != nil {
+		return nil, err
+	}
+	_, err = s.loadPackages(pkg.XTestPackage())
+	if err != nil {
+		return nil, err
+	}
+
+	// Generate a synthetic testmain package.
+	fset := token.NewFileSet()
+	tests := testmain.TestMain{Package: pkg.Package, Context: pkg.bctx}
+	tests.Scan(fset)
+	mainPkg, mainFile, err := tests.Synthesize(fset)
+	if err != nil {
+		return nil, fmt.Errorf("failed to generate testmain package for %s: %w", pkg.ImportPath, err)
+	}
+
+	// Create the sources for the parsed testmain package.
+	srcs := &sources.Sources{
+		ImportPath: mainPkg.ImportPath,
+		Dir:        mainPkg.Dir,
+		Files:      []*ast.File{mainFile},
+		FileSet:    fset,
+	}
+	s.sources[srcs.ImportPath] = srcs
+
+	// Import dependencies for the testmain package.
+	for _, importedPkgPath := range srcs.UnresolvedImports() {
+		_, _, err := s.loadImportPathWithSrcDir(importedPkgPath, pkg.Dir)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return srcs, nil
+}
+
+// loadImportPathWithSrcDir gets the parsed package specified by the import path.
 //
-// Relative import paths are interpreted relative to the passed srcDir. If
-// srcDir is empty, current working directory is assumed.
-func (s *Session) buildImportPathWithSrcDir(path string, srcDir string) (*PackageData, *compiler.Archive, error) {
+// Relative import paths are interpreted relative to the passed srcDir.
+// If srcDir is empty, current working directory is assumed.
+func (s *Session) loadImportPathWithSrcDir(path, srcDir string) (*PackageData, *sources.Sources, error) {
 	pkg, err := s.xctx.Import(path, srcDir, 0)
 	if s.Watcher != nil && pkg != nil { // add watch even on error
 		s.Watcher.Add(pkg.Dir)
@@ -931,65 +1002,85 @@ func (s *Session) buildImportPathWithSrcDir(path string, srcDir string) (*Packag
 		return nil, nil, err
 	}
 
-	archive, err := s.BuildPackage(pkg)
+	srcs, err := s.loadPackages(pkg)
 	if err != nil {
 		return nil, nil, err
 	}
-	return pkg, archive, nil
+	s.cacheImportPath(path, srcDir, pkg.ImportPath)
+	return pkg, srcs, nil
 }
 
-// BuildPackage compiles an already loaded package.
-func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) {
-	if archive, ok := s.UpToDateArchives[pkg.ImportPath]; ok {
-		return archive, nil
+// cacheImportPath stores the resolved import path for the build package
+// so we can look it up later without getting the whole build package.
+// The given path and source directory are the ones passed into
+// XContext.Import to get the build package originally.
+func (s *Session) cacheImportPath(path, srcDir, importPath string) {
+	if paths, ok := s.importPaths[srcDir]; ok {
+		paths[path] = importPath
+	} else {
+		s.importPaths[srcDir] = map[string]string{path: importPath}
 	}
+}
 
-	var fileInfo os.FileInfo
-	gopherjsBinary, err := os.Executable()
-	if err == nil {
-		fileInfo, err = os.Stat(gopherjsBinary)
-		if err == nil && fileInfo.ModTime().After(pkg.SrcModTime) {
-			pkg.SrcModTime = fileInfo.ModTime()
+// getExeModTime will determine the mod time of the GopherJS binary
+// the first time this is called and cache the result for subsequent calls.
+var getExeModTime = func() func() time.Time { + var ( + once sync.Once + result time.Time + ) + getTime := func() { + gopherjsBinary, err := os.Executable() + if err == nil { + var fileInfo os.FileInfo + fileInfo, err = os.Stat(gopherjsBinary) + if err == nil { + result = fileInfo.ModTime() + return + } } - } - if err != nil { os.Stderr.WriteString("Could not get GopherJS binary's modification timestamp. Please report issue.\n") - pkg.SrcModTime = time.Now() + result = time.Now() + } + return func() time.Time { + once.Do(getTime) + return result + } +}() + +// loadPackages will recursively load and parse the given package and +// its dependencies. This will return the sources for the given package. +// The returned source and sources for the dependencies will be added +// to the session's sources map. +func (s *Session) loadPackages(pkg *PackageData) (*sources.Sources, error) { + if srcs, ok := s.sources[pkg.ImportPath]; ok { + return srcs, nil + } + + if exeModTime := getExeModTime(); exeModTime.After(pkg.SrcModTime) { + pkg.SrcModTime = exeModTime } for _, importedPkgPath := range pkg.Imports { if importedPkgPath == "unsafe" { continue } - importedPkg, _, err := s.buildImportPathWithSrcDir(importedPkgPath, pkg.Dir) + importedPkg, _, err := s.loadImportPathWithSrcDir(importedPkgPath, pkg.Dir) if err != nil { return nil, err } - impModTime := importedPkg.SrcModTime - if impModTime.After(pkg.SrcModTime) { + if impModTime := importedPkg.SrcModTime; impModTime.After(pkg.SrcModTime) { pkg.SrcModTime = impModTime } } - if pkg.FileModTime().After(pkg.SrcModTime) { - pkg.SrcModTime = pkg.FileModTime() - } - - if !s.options.NoCache { - archive := s.buildCache.LoadArchive(pkg.ImportPath) - if archive != nil && !pkg.SrcModTime.After(archive.BuildTime) { - if err := archive.RegisterTypes(s.Types); err != nil { - panic(fmt.Errorf("failed to load type information from %v: %w", archive, err)) - } - s.UpToDateArchives[pkg.ImportPath] = archive - // Existing archive is up to date, no need to build it from scratch. - return archive, nil - } + if fileModTime := pkg.FileModTime(); fileModTime.After(pkg.SrcModTime) { + pkg.SrcModTime = fileModTime } - // Existing archive is out of date or doesn't exist, let's build the package. + // Build the package by parsing and augmenting the original files with overlay files. fileSet := token.NewFileSet() files, overlayJsFiles, err := parseAndAugment(s.xctx, pkg, pkg.IsTest, fileSet) if err != nil { @@ -1003,40 +1094,125 @@ func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) { files = append(files, embed) } - importContext := &compiler.ImportContext{ - Packages: s.Types, - Import: s.ImportResolverFor(pkg), + srcs := &sources.Sources{ + ImportPath: pkg.ImportPath, + Dir: pkg.Dir, + Files: files, + FileSet: fileSet, + JSFiles: append(pkg.JSFiles, overlayJsFiles...), + } + s.sources[pkg.ImportPath] = srcs + + // Import dependencies from the augmented files, + // whilst skipping any that have been already imported. + for _, importedPkgPath := range srcs.UnresolvedImports(pkg.Imports...) { + _, _, err := s.loadImportPathWithSrcDir(importedPkgPath, pkg.Dir) + if err != nil { + return nil, err + } + } + + return srcs, nil +} + +func (s *Session) prepareAndCompilePackages(rootSrcs *sources.Sources) (*compiler.Archive, error) { + tContext := types.NewContext() + allSources := s.getSortedSources() + + // Prepare and analyze the source code. + // This will be performed recursively for all dependencies. 
+ if err := compiler.PrepareAllSources(allSources, s.SourcesForImport, tContext); err != nil { + return nil, err } - archive, err := compiler.Compile(pkg.ImportPath, files, fileSet, importContext, s.options.Minify) + + // Compile all the sources into archives. + for _, srcs := range allSources { + if _, err := s.compilePackage(srcs, tContext); err != nil { + return nil, err + } + } + + rootArchive, ok := s.UpToDateArchives[rootSrcs.ImportPath] + if !ok { + // This is confirmation that the root package is in the sources map and got compiled. + return nil, fmt.Errorf(`root package %q was not found in archives`, rootSrcs.ImportPath) + } + return rootArchive, nil +} + +func (s *Session) compilePackage(srcs *sources.Sources, tContext *types.Context) (*compiler.Archive, error) { + if archive, ok := s.UpToDateArchives[srcs.ImportPath]; ok { + return archive, nil + } + + archive, err := compiler.Compile(srcs, tContext, s.options.Minify) if err != nil { return nil, err } - for _, jsFile := range append(pkg.JSFiles, overlayJsFiles...) { + for _, jsFile := range srcs.JSFiles { archive.IncJSCode = append(archive.IncJSCode, []byte("\t(function() {\n")...) archive.IncJSCode = append(archive.IncJSCode, jsFile.Content...) archive.IncJSCode = append(archive.IncJSCode, []byte("\n\t}).call($global);\n")...) } if s.options.Verbose { - fmt.Println(pkg.ImportPath) + fmt.Println(srcs.ImportPath) } - s.buildCache.StoreArchive(archive) - s.UpToDateArchives[pkg.ImportPath] = archive + s.UpToDateArchives[srcs.ImportPath] = archive return archive, nil } +func (s *Session) getImportPath(path, srcDir string) (string, error) { + // If path is for an xtest package, just return it. + if strings.HasSuffix(path, "_test") { + return path, nil + } + + // Check if the import path is already cached. + if importPath, ok := s.importPaths[srcDir][path]; ok { + return importPath, nil + } + + // Fall back to the slow import of the build package. + pkg, err := s.xctx.Import(path, srcDir, 0) + if err != nil { + return ``, err + } + s.cacheImportPath(path, srcDir, pkg.ImportPath) + return pkg.ImportPath, nil +} + +func (s *Session) SourcesForImport(path, srcDir string) (*sources.Sources, error) { + importPath, err := s.getImportPath(path, srcDir) + if err != nil { + return nil, err + } + + srcs, ok := s.sources[importPath] + if !ok { + return nil, fmt.Errorf(`sources for %q not found`, path) + } + + return srcs, nil +} + // ImportResolverFor returns a function which returns a compiled package archive // given an import path. 
-func (s *Session) ImportResolverFor(pkg *PackageData) func(string) (*compiler.Archive, error) { +func (s *Session) ImportResolverFor(srcDir string) func(string) (*compiler.Archive, error) { return func(path string) (*compiler.Archive, error) { - if archive, ok := s.UpToDateArchives[path]; ok { + importPath, err := s.getImportPath(path, srcDir) + if err != nil { + return nil, err + } + + if archive, ok := s.UpToDateArchives[importPath]; ok { return archive, nil } - _, archive, err := s.buildImportPathWithSrcDir(path, pkg.Dir) - return archive, err + + return nil, fmt.Errorf(`archive for %q not found`, importPath) } } @@ -1074,13 +1250,7 @@ func (s *Session) WriteCommandPackage(archive *compiler.Archive, pkgObj string) sourceMapFilter.MappingCallback = s.SourceMappingCallback(m) } - deps, err := compiler.ImportDependencies(archive, func(path string) (*compiler.Archive, error) { - if archive, ok := s.UpToDateArchives[path]; ok { - return archive, nil - } - _, archive, err := s.buildImportPathWithSrcDir(path, "") - return archive, err - }) + deps, err := compiler.ImportDependencies(archive, s.ImportResolverFor("")) if err != nil { return err } @@ -1130,8 +1300,9 @@ func hasGopathPrefix(file, gopath string) (hasGopathPrefix bool, prefixLen int) func (s *Session) WaitForChange() { // Will need to re-validate up-to-dateness of all archives, so flush them from // memory. + s.importPaths = map[string]map[string]string{} + s.sources = map[string]*sources.Sources{} s.UpToDateArchives = map[string]*compiler.Archive{} - s.Types = map[string]*types.Package{} s.options.PrintSuccess("watching for changes...\n") for { diff --git a/build/build_test.go b/build/build_test.go index 343e8b933..7bda7f54a 100644 --- a/build/build_test.go +++ b/build/build_test.go @@ -418,18 +418,17 @@ func TestOverlayAugmentation(t *testing.T) { test.want = test.src } - fsetSrc := token.NewFileSet() - fileSrc := srctesting.Parse(t, fsetSrc, pkgName+test.src) + f := srctesting.New(t) + fileSrc := f.Parse("test.go", pkgName+test.src) overrides := map[string]overrideInfo{} augmentOverlayFile(fileSrc, overrides) pruneImports(fileSrc) - got := srctesting.Format(t, fsetSrc, fileSrc) + got := srctesting.Format(t, f.FileSet, fileSrc) - fsetWant := token.NewFileSet() - fileWant := srctesting.Parse(t, fsetWant, pkgName+test.want) - want := srctesting.Format(t, fsetWant, fileWant) + fileWant := f.Parse("test.go", pkgName+test.want) + want := srctesting.Format(t, f.FileSet, fileWant) if got != want { t.Errorf("augmentOverlayFile and pruneImports got unexpected code:\n"+ @@ -720,18 +719,17 @@ func TestOriginalAugmentation(t *testing.T) { t.Run(test.desc, func(t *testing.T) { pkgName := "package testpackage\n\n" importPath := `math/rand` - fsetSrc := token.NewFileSet() - fileSrc := srctesting.Parse(t, fsetSrc, pkgName+test.src) + f := srctesting.New(t) + fileSrc := f.Parse("test.go", pkgName+test.src) augmentOriginalImports(importPath, fileSrc) augmentOriginalFile(fileSrc, test.info) pruneImports(fileSrc) - got := srctesting.Format(t, fsetSrc, fileSrc) + got := srctesting.Format(t, f.FileSet, fileSrc) - fsetWant := token.NewFileSet() - fileWant := srctesting.Parse(t, fsetWant, pkgName+test.want) - want := srctesting.Format(t, fsetWant, fileWant) + fileWant := f.Parse("test.go", pkgName+test.want) + want := srctesting.Format(t, f.FileSet, fileWant) if got != want { t.Errorf("augmentOriginalImports, augmentOriginalFile, and pruneImports got unexpected code:\n"+ diff --git a/build/cache/cache.go b/build/cache/cache.go index 14257ef9e..fc0949d67 
100644 --- a/build/cache/cache.go +++ b/build/cache/cache.go @@ -6,9 +6,11 @@ import ( "crypto/sha256" "fmt" "go/build" + "go/types" "os" "path" "path/filepath" + "time" "github.com/gopherjs/gopherjs/compiler" log "github.com/sirupsen/logrus" @@ -58,7 +60,7 @@ func Clear() error { // the cache. For example, any artifacts that were cached for a minified build // must not be reused for a non-minified build. GopherJS version change also // invalidates the cache. It is callers responsibility to ensure that artifacts -// passed the the StoreArchive function were generated with the same build +// passed the StoreArchive function were generated with the same build // parameters as the cache is configured. // // There is no upper limit for the total cache size. It can be cleared @@ -90,7 +92,10 @@ func (bc BuildCache) String() string { // StoreArchive compiled archive in the cache. Any error inside this method // will cause the cache not to be persisted. -func (bc *BuildCache) StoreArchive(a *compiler.Archive) { +// +// The passed in buildTime is used to determine if the archive is out-of-date when reloaded. +// Typically it should be set to the srcModTime or time.Now(). +func (bc *BuildCache) StoreArchive(a *compiler.Archive, buildTime time.Time) { if bc == nil { return // Caching is disabled. } @@ -106,7 +111,7 @@ func (bc *BuildCache) StoreArchive(a *compiler.Archive) { return } defer f.Close() - if err := compiler.WriteArchive(a, f); err != nil { + if err := compiler.WriteArchive(a, buildTime, f); err != nil { log.Warningf("Failed to write build cache archive %q: %v", a, err) // Make sure we don't leave a half-written archive behind. os.Remove(f.Name()) @@ -125,7 +130,10 @@ func (bc *BuildCache) StoreArchive(a *compiler.Archive) { // // The returned archive would have been built with the same configuration as // the build cache was. -func (bc *BuildCache) LoadArchive(importPath string) *compiler.Archive { +// +// The imports map is used to resolve package dependencies and may modify the +// map to include the package from the read archive. See [gcexportdata.Read]. +func (bc *BuildCache) LoadArchive(importPath string, srcModTime time.Time, imports map[string]*types.Package) *compiler.Archive { if bc == nil { return nil // Caching is disabled. } @@ -140,12 +148,16 @@ func (bc *BuildCache) LoadArchive(importPath string) *compiler.Archive { return nil // Cache miss. } defer f.Close() - a, err := compiler.ReadArchive(importPath, f) + a, buildTime, err := compiler.ReadArchive(importPath, f, srcModTime, imports) if err != nil { log.Warningf("Failed to read cached package archive for %q: %v", importPath, err) return nil // Invalid/corrupted archive, cache miss. } - log.Infof("Found cached package archive for %q, built at %v.", importPath, a.BuildTime) + if a == nil { + log.Infof("Found out-of-date package archive for %q, built at %v.", importPath, buildTime) + return nil // Archive is out-of-date, cache miss. 
+ } + log.Infof("Found cached package archive for %q, built at %v.", importPath, buildTime) return a } diff --git a/build/cache/cache_test.go b/build/cache/cache_test.go index fd89ec187..0a0541f64 100644 --- a/build/cache/cache_test.go +++ b/build/cache/cache_test.go @@ -1,7 +1,9 @@ package cache import ( + "go/types" "testing" + "time" "github.com/google/go-cmp/cmp" "github.com/gopherjs/gopherjs/compiler" @@ -15,21 +17,24 @@ func TestStore(t *testing.T) { Imports: []string{"fake/dep"}, } + srcModTime := newTime(0.0) + buildTime := newTime(5.0) + imports := map[string]*types.Package{} bc := BuildCache{} - if got := bc.LoadArchive(want.ImportPath); got != nil { + if got := bc.LoadArchive(want.ImportPath, srcModTime, imports); got != nil { t.Errorf("Got: %s was found in the cache. Want: empty cache.", got.ImportPath) } - bc.StoreArchive(want) - got := bc.LoadArchive(want.ImportPath) + bc.StoreArchive(want, buildTime) + got := bc.LoadArchive(want.ImportPath, srcModTime, imports) if got == nil { - t.Errorf("Got: %s wan not found in the cache. Want: archive is can be loaded after store.", want.ImportPath) + t.Errorf("Got: %s was not found in the cache. Want: archive is can be loaded after store.", want.ImportPath) } if diff := cmp.Diff(want, got); diff != "" { t.Errorf("Loaded archive is different from stored (-want,+got):\n%s", diff) } // Make sure the package names are a part of the cache key. - if got := bc.LoadArchive("fake/other"); got != nil { + if got := bc.LoadArchive("fake/other", srcModTime, imports); got != nil { t.Errorf("Got: fake/other was found in cache: %#v. Want: nil for packages that weren't cached.", got) } } @@ -59,20 +64,54 @@ func TestInvalidation(t *testing.T) { }, } + srcModTime := newTime(0.0) + buildTime := newTime(5.0) + imports := map[string]*types.Package{} for _, test := range tests { a := &compiler.Archive{ImportPath: "package/fake"} - test.cache1.StoreArchive(a) + test.cache1.StoreArchive(a, buildTime) - if got := test.cache2.LoadArchive(a.ImportPath); got != nil { + if got := test.cache2.LoadArchive(a.ImportPath, srcModTime, imports); got != nil { t.Logf("-cache1,+cache2:\n%s", cmp.Diff(test.cache1, test.cache2)) t.Errorf("Got: %v loaded from cache. Want: build parameter change invalidates cache.", got) } } } +func TestOldArchive(t *testing.T) { + cacheForTest(t) + + want := &compiler.Archive{ + ImportPath: "fake/package", + Imports: []string{"fake/dep"}, + } + + buildTime := newTime(5.0) + imports := map[string]*types.Package{} + bc := BuildCache{} + bc.StoreArchive(want, buildTime) + + oldSrcModTime := newTime(2.0) // older than archive build time, so archive is up-to-date + got := bc.LoadArchive(want.ImportPath, oldSrcModTime, imports) + if got == nil { + t.Errorf("Got: %s was nil. Want: up-to-date archive to be loaded.", want.ImportPath) + } + + newerSrcModTime := newTime(7.0) // newer than archive build time, so archive is stale + got = bc.LoadArchive(want.ImportPath, newerSrcModTime, imports) + if got != nil { + t.Errorf("Got: %s was not nil. Want: stale archive to not be loaded.", want.ImportPath) + } +} + func cacheForTest(t *testing.T) { t.Helper() originalRoot := cacheRoot t.Cleanup(func() { cacheRoot = originalRoot }) cacheRoot = t.TempDir() } + +func newTime(seconds float64) time.Time { + return time.Date(1969, 7, 20, 20, 17, 0, 0, time.UTC). 
+ Add(time.Duration(seconds * float64(time.Second))) +} diff --git a/build/context.go b/build/context.go index 316bfb2bb..657300839 100644 --- a/build/context.go +++ b/build/context.go @@ -4,7 +4,6 @@ import ( "fmt" "go/build" "go/token" - "io" "net/http" "os" "os/exec" @@ -16,6 +15,7 @@ import ( _ "github.com/gopherjs/gopherjs/build/versionhack" // go/build release tags hack. "github.com/gopherjs/gopherjs/compiler" "github.com/gopherjs/gopherjs/compiler/gopherjspkg" + "github.com/gopherjs/gopherjs/compiler/jsFile" "github.com/gopherjs/gopherjs/compiler/natives" "golang.org/x/tools/go/buildutil" ) @@ -91,7 +91,7 @@ func (sc simpleCtx) Import(importPath string, srcDir string, mode build.ImportMo if err != nil { return nil, err } - jsFiles, err := jsFilesFromDir(&sc.bctx, pkg.Dir) + jsFiles, err := jsFile.JSFilesFromDir(&sc.bctx, pkg.Dir) if err != nil { return nil, fmt.Errorf("failed to enumerate .inc.js files in %s: %w", pkg.Dir, err) } @@ -440,40 +440,3 @@ func updateImports(sources []string, importPos map[string][]token.Position) (new sort.Strings(newImports) return newImports, newImportPos } - -// jsFilesFromDir finds and loads any *.inc.js packages in the build context -// directory. -func jsFilesFromDir(bctx *build.Context, dir string) ([]JSFile, error) { - files, err := buildutil.ReadDir(bctx, dir) - if err != nil { - return nil, err - } - var jsFiles []JSFile - for _, file := range files { - if !strings.HasSuffix(file.Name(), ".inc.js") || file.IsDir() { - continue - } - if file.Name()[0] == '_' || file.Name()[0] == '.' { - continue // Skip "hidden" files that are typically ignored by the Go build system. - } - - path := buildutil.JoinPath(bctx, dir, file.Name()) - f, err := buildutil.OpenFile(bctx, path) - if err != nil { - return nil, fmt.Errorf("failed to open %s from %v: %w", path, bctx, err) - } - defer f.Close() - - content, err := io.ReadAll(f) - if err != nil { - return nil, fmt.Errorf("failed to read %s from %v: %w", path, bctx, err) - } - - jsFiles = append(jsFiles, JSFile{ - Path: path, - ModTime: file.ModTime(), - Content: content, - }) - } - return jsFiles, nil -} diff --git a/build/embed.go b/build/embed.go index c749eeb50..a68fb9494 100644 --- a/build/embed.go +++ b/build/embed.go @@ -8,7 +8,7 @@ import ( "go/token" "strconv" - "github.com/visualfc/goembed" + "github.com/msvitok77/goembed" ) func buildIdent(name string) string { diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 2c3095c8b..000000000 --- a/circle.yml +++ /dev/null @@ -1,280 +0,0 @@ -# CircleCI configuration for GopherJS. -# -# This configuration has one build_and_test workflow designed to run on all commits -# and pull requests. It consists of three jobs: -# -# - build: Builds and runs GopherJS unit tests, as well as lints, smoke tests, etc. -# This job is designed to provide quickest feedback on the most important -# functionality. It should not include anything heavyweight and should be kept -# under 2-3 minutes of runtime. -# -# - gopherjs_tests: Runs standard library and GopherJS package tests using GopherJS -# *itself*. This is the most comprehensive test suite we have, and it is sharded -# into 4 parallel instances for faster execution. -# -# - gorepo_tests: Runs language tests from the Go compiler test suite. The objective -# of these tests is to ensure conformance of GopherJS to the upstream Go to the -# highest degree possible, considering differences in the runtime. 
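The build/context.go hunk above replaces the package-local jsFilesFromDir helper with jsFile.JSFilesFromDir. Below is a minimal usage sketch, assuming the relocated helper keeps the signature and JSFile fields (Path, ModTime, Content) of the removed function shown in this diff; the directory path is hypothetical:

```go
package main

import (
	"fmt"
	"go/build"
	"log"

	"github.com/gopherjs/gopherjs/compiler/jsFile"
)

func main() {
	bctx := build.Default
	// Enumerate *.inc.js files shipped alongside a (hypothetical) package directory.
	files, err := jsFile.JSFilesFromDir(&bctx, "./mypkg")
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range files {
		fmt.Printf("%s: %d bytes, modified %s\n", f.Path, len(f.Content), f.ModTime)
	}
}
```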
-# -# If all tests passed, it is reasonably to assume that the version is more or less -# bug-free (although as of summer 2021 our test coverage is not ideal). -# -# For convenience of upgrades, NVM, Node.js and Go versions are specified as -# parameters at the top of the config. Increasing the version and ensuring that the -# workflow passes is sufficient to verify GopherJS compatibility with that version. -# -# Versions of Node modules GopherJS depends on are specified in package.json and can -# be changed there (remember to run `npm install` to update the lock file). - -version: 2.1 -executors: - gopherjs: - docker: - - image: cimg/base:stable - working_directory: ~/gopherjs - -workflows: - version: 2 - build_and_test: - jobs: - - build - - gopherjs_tests: - requires: - - build - - gorepo_tests: - requires: - - build - - darwin_smoke: - requires: - - build - - windows_smoke: - requires: - - build - -parameters: - go_version: - type: string - default: "1.19.13" - chocolatey_go_version: - type: string - # Chocolatey doesn't have 1.19.13, closest is 1.19.9 - default: "1.19.9" - nvm_version: - type: string - default: "0.38.0" - node_version: - type: string - default: "12" - -orbs: - win: circleci/windows@4.0.0 - go: circleci/go@1.7.1 - node: circleci/node@5.0.1 - -jobs: - build: - executor: gopherjs - steps: - - setup_and_install_gopherjs - - run: - name: Check natives build tags - command: diff -u <(echo -n) <(go list ./compiler/natives/src/...) # All those packages should have // +build js. - - run: - name: Smoke tests - command: | - gopherjs build -v net/http # Should build successfully. - gopherjs test -v fmt log # Should catch problems with test execution and source maps. - - run: - name: go test ... - command: | - set +e - # Run all tests except gorepo, which will be run separately in parallel. - go test -v -race $(go list ./... | grep -v github.com/gopherjs/gopherjs/tests/gorepo) | tee /tmp/test-go.txt - status="$?" - # Convert test output into junit format for CircleCI. - mkdir -p ~/test-reports/ - go-junit-report --full-class-name < /tmp/test-go.txt > ~/test-reports/go.xml - exit "$status" - - store_test_results: - path: ~/test-reports/ - - run: - name: TodoMVC in GOPATH mode - command: | - set -e - export GO111MODULE=off - export GOPATH=/tmp/gopath - mkdir -p $GOPATH/src/github.com/gopherjs/gopherjs - cp -r -p . $GOPATH/src/github.com/gopherjs/gopherjs/ - go get -v github.com/gopherjs/todomvc - gopherjs build -v -o /tmp/todomvc_gopath.js github.com/gopherjs/todomvc - gopherjs test -v github.com/gopherjs/todomvc/... - find $GOPATH - - run: - name: TodoMVC in Go Modules mode - command: | - set -e - export GO111MODULE=on - export GOPATH=/tmp/gomod - mkdir -p $GOPATH/src - cd /tmp - git clone --depth=1 https://github.com/gopherjs/todomvc.git - cd /tmp/todomvc - gopherjs build -v -o /tmp/todomvc_gomod.js github.com/gopherjs/todomvc - gopherjs test -v github.com/gopherjs/todomvc/... - find $GOPATH - - run: - name: Compare GOPATH and Go Modules output - command: diff -u <(sed 's/todomvc_gomod.js.map/todomvc_ignored.js.map/' /tmp/todomvc_gomod.js) <(sed 's/todomvc_gopath.js.map/todomvc_ignored.js.map/' /tmp/todomvc_gopath.js) - - gopherjs_tests: - executor: gopherjs - parallelism: 4 - steps: - - setup_and_install_gopherjs - - run: - name: gopherjs test ... - command: | - set +e - ulimit -s 10000 - PACKAGE_NAMES=$( \ - GOOS=js GOARCH=wasm go list std github.com/gopherjs/gopherjs/js/... github.com/gopherjs/gopherjs/tests/... 
\ - | grep -v -x -f .std_test_pkg_exclusions \ - | circleci tests split --split-by=timings --timings-type=classname \ - ) - gopherjs test -p 2 --minify -v --short $PACKAGE_NAMES \ - | tee /tmp/test-gopherjs.txt - status="$?" - set -e - # Convert test output into junit format for CircleCI. - mkdir -p ~/test-reports/ - go-junit-report --full-class-name < /tmp/test-gopherjs.txt > ~/test-reports/gopherjs-${CIRCLE_NODE_INDEX}.xml - exit "$status" - no_output_timeout: "1h" # Packages like math/big take a while to run all tests. - - store_test_results: - path: ~/test-reports/ - - gorepo_tests: - executor: gopherjs - parallelism: 4 - steps: - - setup_environment - - checkout - - install_deps - - install_gopherjs - - run: - name: Go Repository tests - command: | - go test -v github.com/gopherjs/gopherjs/tests/gorepo - - windows_smoke: - executor: - name: win/default - shell: powershell.exe - steps: - - checkout - - run: - name: Install Go - command: | - choco install golang --version="<< pipeline.parameters.chocolatey_go_version >>" -my --force -y - go version - (Get-Command go).Path - [Environment]::SetEnvironmentVariable( - "Path", - [Environment]::GetEnvironmentVariable("Path", [EnvironmentVariableTarget]::Machine) + ";C:\Users\circleci\go\bin", - [EnvironmentVariableTarget]::Machine) - - - install_deps: - optional: false - - run: - name: Install GopherJS - command: - go install -v . - (Get-Command gopherjs).Path - - run: - name: Test GopherJS - command: go test -v -short ./... - - run: - name: Smoke tests - command: | - $env:NODE_PATH=$(npm root) - $env:SOURCE_MAP_SUPPORT=false - gopherjs build -v net/http - gopherjs test -v --short fmt sort ./tests - - darwin_smoke: - macos: - xcode: 13.4.1 # Mac OS 12.6.1, see https://circleci.com/docs/using-macos/ - steps: - - checkout - - setup_environment - - install_deps: - optional: false - - run: - name: Install GopherJS - command: go install -v . - - run: - name: Test GopherJS - command: go test -v -short ./... - - run: - name: Smoke tests - command: | - gopherjs build -v net/http - gopherjs test -v --short fmt log os ./tests - -commands: - setup_environment: - description: Set up Go, NVM and Node.js - steps: - - go/install: - version: << pipeline.parameters.go_version >> - - node/install: - node-version: << pipeline.parameters.node_version >> - - run: - name: Set up environment - command: | - echo 'export PATH="$PATH:$HOME/go/bin"' >> $BASH_ENV - echo 'export GO111MODULE=on' >> $BASH_ENV - echo 'export SOURCE_MAP_SUPPORT=true' >> $BASH_ENV - # Make nodejs able to require installed modules from any working path. - echo "export NODE_PATH=$(npm root)" >> $BASH_ENV - go version - node -v - go install -v github.com/nevkontakte/go-junit-report@forked # For CircleCI test reports. - install_deps: - description: Install Go and Node dependency packages - parameters: - optional: - default: true - type: boolean - description: Install node-syscall module and its dependencies. - steps: - - when: - condition: - not: << parameters.optional >> - steps: - - run: - name: Install required Node.js packages - command: | - # Extra flags to avoid installing node-syscall. - npm install --no-optional --no-package-lock - - when: - condition: << parameters.optional >> - steps: - - run: - name: Install required Node.js packages (including optional) - command: | - npm ci # Install our dependencies from package.json. 
- - go/mod-download - install_gopherjs: - description: Install GopherJS - steps: - - run: - name: Install GopherJS - command: go install -v && gopherjs version - setup_and_install_gopherjs: - description: A shorthand for setting up GopherJS environment and building the binary. - steps: - - setup_environment - - checkout - - install_deps - - install_gopherjs diff --git a/compiler/analysis/info.go b/compiler/analysis/info.go deleted file mode 100644 index 304c8808a..000000000 --- a/compiler/analysis/info.go +++ /dev/null @@ -1,393 +0,0 @@ -package analysis - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "github.com/gopherjs/gopherjs/compiler/astutil" - "github.com/gopherjs/gopherjs/compiler/typesutil" -) - -type continueStmt struct { - forStmt *ast.ForStmt - analyzeStack astPath -} - -func newContinueStmt(forStmt *ast.ForStmt, stack astPath) continueStmt { - cs := continueStmt{ - forStmt: forStmt, - analyzeStack: stack.copy(), - } - return cs -} - -// astPath is a list of AST nodes where each previous node is a parent of the -// next node. -type astPath []ast.Node - -func (src astPath) copy() astPath { - dst := make(astPath, len(src)) - copy(dst, src) - return dst -} - -func (ap astPath) String() string { - s := &strings.Builder{} - s.WriteString("[") - for i, n := range ap { - if i > 0 { - s.WriteString(", ") - } - fmt.Fprintf(s, "%T(%p)", n, n) - } - s.WriteString("]") - return s.String() -} - -type Info struct { - *types.Info - Pkg *types.Package - HasPointer map[*types.Var]bool - FuncDeclInfos map[*types.Func]*FuncInfo - FuncLitInfos map[*ast.FuncLit]*FuncInfo - InitFuncInfo *FuncInfo // Context for package variable initialization. - - isImportedBlocking func(*types.Func) bool // For functions from other packages. - allInfos []*FuncInfo -} - -func (info *Info) newFuncInfo(n ast.Node) *FuncInfo { - funcInfo := &FuncInfo{ - pkgInfo: info, - Flattened: make(map[ast.Node]bool), - Blocking: make(map[ast.Node]bool), - GotoLabel: make(map[*types.Label]bool), - localNamedCallees: make(map[*types.Func][]astPath), - literalFuncCallees: make(map[*ast.FuncLit][]astPath), - } - - // Register the function in the appropriate map. - switch n := n.(type) { - case *ast.FuncDecl: - if n.Body == nil { - // Function body comes from elsewhere (for example, from a go:linkname - // directive), conservatively assume that it may be blocking. - // TODO(nevkontakte): It is possible to improve accuracy of this detection. - // Since GopherJS supports inly "import-style" go:linkname, at this stage - // the compiler already determined whether the implementation function is - // blocking, and we could check that. - funcInfo.Blocking[n] = true - } - info.FuncDeclInfos[info.Defs[n.Name].(*types.Func)] = funcInfo - case *ast.FuncLit: - info.FuncLitInfos[n] = funcInfo - } - - // And add it to the list of all functions. 
- info.allInfos = append(info.allInfos, funcInfo) - - return funcInfo -} - -func (info *Info) IsBlocking(fun *types.Func) bool { - if funInfo := info.FuncDeclInfos[fun]; funInfo != nil { - return len(funInfo.Blocking) > 0 - } - panic(fmt.Errorf(`info did not have function declaration for %s`, fun.FullName())) -} - -func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typesPkg *types.Package, isBlocking func(*types.Func) bool) *Info { - info := &Info{ - Info: typesInfo, - Pkg: typesPkg, - HasPointer: make(map[*types.Var]bool), - isImportedBlocking: isBlocking, - FuncDeclInfos: make(map[*types.Func]*FuncInfo), - FuncLitInfos: make(map[*ast.FuncLit]*FuncInfo), - } - info.InitFuncInfo = info.newFuncInfo(nil) - - // Traverse the full AST of the package and collect information about existing - // functions. - for _, file := range files { - ast.Walk(info.InitFuncInfo, file) - } - - for _, funcInfo := range info.allInfos { - if !funcInfo.HasDefer { - continue - } - // Conservatively assume that if a function has a deferred call, it might be - // blocking, and therefore all return statements need to be treated as - // blocking. - // TODO(nevkontakte): This could be improved by detecting whether a deferred - // call is actually blocking. Doing so might reduce generated code size a - // bit. - for _, returnStmt := range funcInfo.returnStmts { - funcInfo.markBlocking(returnStmt) - } - } - - // Propagate information about blocking calls to the caller functions. - // For each function we check all other functions it may call and if any of - // them are blocking, we mark the caller blocking as well. The process is - // repeated until no new blocking functions is detected. - for { - done := true - for _, caller := range info.allInfos { - // Check calls to named functions and function-typed variables. - for callee, callSites := range caller.localNamedCallees { - if info.IsBlocking(callee) { - for _, callSite := range callSites { - caller.markBlocking(callSite) - } - delete(caller.localNamedCallees, callee) - done = false - } - } - - // Check direct calls to function literals. - for callee, callSites := range caller.literalFuncCallees { - if len(info.FuncLitInfos[callee].Blocking) > 0 { - for _, callSite := range callSites { - caller.markBlocking(callSite) - } - delete(caller.literalFuncCallees, callee) - done = false - } - } - } - if done { - break - } - } - - // After all function blocking information was propagated, mark flow control - // statements as blocking whenever they may lead to a blocking function call. - for _, funcInfo := range info.allInfos { - for _, continueStmt := range funcInfo.continueStmts { - if funcInfo.Blocking[continueStmt.forStmt.Post] { - // If a for-loop post-expression is blocking, the continue statement - // that leads to it must be treated as blocking. - funcInfo.markBlocking(continueStmt.analyzeStack) - } - } - } - - return info -} - -type FuncInfo struct { - HasDefer bool - // Nodes are "flattened" into a switch-case statement when we need to be able - // to jump into an arbitrary position in the code with a GOTO statement, or - // resume a goroutine after a blocking call unblocks. - Flattened map[ast.Node]bool - // Blocking indicates that either the AST node itself or its descendant may - // block goroutine execution (for example, a channel operation). - Blocking map[ast.Node]bool - // GotoLavel indicates a label referenced by a goto statement, rather than a - // named loop. 
- GotoLabel map[*types.Label]bool - // List of continue statements in the function. - continueStmts []continueStmt - // List of return statements in the function. - returnStmts []astPath - // List of other named functions from the current package this function calls. - // If any of them are blocking, this function will become blocking too. - localNamedCallees map[*types.Func][]astPath - // List of function literals directly called from this function (for example: - // `func() { /* do stuff */ }()`). This is distinct from function literals - // assigned to named variables (for example: `doStuff := func() {}; - // doStuff()`), which are handled by localNamedCallees. If any of them are - // identified as blocking, this function will become blocking too. - literalFuncCallees map[*ast.FuncLit][]astPath - - pkgInfo *Info // Function's parent package. - visitorStack astPath -} - -func (fi *FuncInfo) Visit(node ast.Node) ast.Visitor { - if node == nil { - if len(fi.visitorStack) != 0 { - fi.visitorStack = fi.visitorStack[:len(fi.visitorStack)-1] - } - return nil - } - fi.visitorStack = append(fi.visitorStack, node) - - switch n := node.(type) { - case *ast.FuncDecl, *ast.FuncLit: - // Analyze the function in its own context. - return fi.pkgInfo.newFuncInfo(n) - case *ast.BranchStmt: - switch n.Tok { - case token.GOTO: - // Emulating GOTO in JavaScript requires the code to be flattened into a - // switch-statement. - fi.markFlattened(fi.visitorStack) - fi.GotoLabel[fi.pkgInfo.Uses[n.Label].(*types.Label)] = true - case token.CONTINUE: - loopStmt := astutil.FindLoopStmt(fi.visitorStack, n, fi.pkgInfo.Info) - if forStmt, ok := (loopStmt).(*ast.ForStmt); ok { - // In `for x; y; z { ... }` loops `z` may be potentially blocking - // and therefore continue expression that triggers it would have to - // be treated as blocking. - fi.continueStmts = append(fi.continueStmts, newContinueStmt(forStmt, fi.visitorStack)) - } - } - return fi - case *ast.CallExpr: - return fi.visitCallExpr(n) - case *ast.SendStmt: - // Sending into a channel is blocking. - fi.markBlocking(fi.visitorStack) - return fi - case *ast.UnaryExpr: - switch n.Op { - case token.AND: - if id, ok := astutil.RemoveParens(n.X).(*ast.Ident); ok { - fi.pkgInfo.HasPointer[fi.pkgInfo.Uses[id].(*types.Var)] = true - } - case token.ARROW: - // Receiving from a channel is blocking. - fi.markBlocking(fi.visitorStack) - } - return fi - case *ast.RangeStmt: - if _, ok := fi.pkgInfo.TypeOf(n.X).Underlying().(*types.Chan); ok { - // for-range loop over a channel is blocking. - fi.markBlocking(fi.visitorStack) - } - return fi - case *ast.SelectStmt: - for _, s := range n.Body.List { - if s.(*ast.CommClause).Comm == nil { // default clause - return fi - } - } - // Select statements without a default case are blocking. - fi.markBlocking(fi.visitorStack) - return fi - case *ast.CommClause: - // FIXME(nevkontakte): Does this need to be manually spelled out? Presumably - // ast.Walk would visit all those nodes anyway, and we are not creating any - // new contexts here. - // https://github.com/gopherjs/gopherjs/issues/230 seems to be relevant? - switch comm := n.Comm.(type) { - case *ast.SendStmt: - ast.Walk(fi, comm.Chan) - ast.Walk(fi, comm.Value) - case *ast.ExprStmt: - ast.Walk(fi, comm.X.(*ast.UnaryExpr).X) - case *ast.AssignStmt: - ast.Walk(fi, comm.Rhs[0].(*ast.UnaryExpr).X) - } - for _, s := range n.Body { - ast.Walk(fi, s) - } - return nil // The subtree was manually checked, no need to visit it again. 
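For reference while reading the analysis code being removed above, here is a small self-contained sample (modeled on the fixtures in the deleted info_test.go below) with comments noting how those rules classify each construct; the function names are illustrative only:

```go
package main

func blocking() {
	c := make(chan bool)
	<-c // a channel receive marks the enclosing function as blocking
}

func indirectlyBlocking() {
	func() { blocking() }() // calling a blocking function literal propagates blocking to the caller
}

func notBlocking() {
	select {
	default: // a select with a default clause is treated as non-blocking
	}
	println("done")
}

func main() {
	notBlocking()
	go indirectlyBlocking() // runs on its own goroutine, so main itself is not marked blocking by it
}
```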
- case *ast.GoStmt: - // Unlike a regular call, the function in a go statement doesn't block the - // caller goroutine, but the expression that determines the function and its - // arguments still need to be checked. - ast.Walk(fi, n.Call.Fun) - for _, arg := range n.Call.Args { - ast.Walk(fi, arg) - } - return nil // The subtree was manually checked, no need to visit it again. - case *ast.DeferStmt: - fi.HasDefer = true - if funcLit, ok := n.Call.Fun.(*ast.FuncLit); ok { - ast.Walk(fi, funcLit.Body) - } - return fi - case *ast.ReturnStmt: - // Capture all return statements in the function. They could become blocking - // if the function has a blocking deferred call. - fi.returnStmts = append(fi.returnStmts, fi.visitorStack.copy()) - return fi - default: - return fi - } - // Deliberately no return here to make sure that each of the cases above is - // self-sufficient and explicitly decides in which context the its AST subtree - // needs to be analyzed. -} - -func (fi *FuncInfo) visitCallExpr(n *ast.CallExpr) ast.Visitor { - switch f := astutil.RemoveParens(n.Fun).(type) { - case *ast.Ident: - fi.callToNamedFunc(fi.pkgInfo.Uses[f]) - case *ast.SelectorExpr: - if sel := fi.pkgInfo.Selections[f]; sel != nil && typesutil.IsJsObject(sel.Recv()) { - // js.Object methods are known to be non-blocking, but we still must - // check its arguments. - } else { - fi.callToNamedFunc(fi.pkgInfo.Uses[f.Sel]) - } - case *ast.FuncLit: - // Collect info about the function literal itself. - ast.Walk(fi, n.Fun) - - // Check all argument expressions. - for _, arg := range n.Args { - ast.Walk(fi, arg) - } - // Register literal function call site in case it is identified as blocking. - fi.literalFuncCallees[f] = append(fi.literalFuncCallees[f], fi.visitorStack.copy()) - return nil // No need to walk under this CallExpr, we already did it manually. - default: - if astutil.IsTypeExpr(f, fi.pkgInfo.Info) { - // This is a type assertion, not a call. Type assertion itself is not - // blocking, but we will visit the expression itself. - } else { - // The function is returned by a non-trivial expression. We have to be - // conservative and assume that function might be blocking. - fi.markBlocking(fi.visitorStack) - } - } - - return fi -} - -func (fi *FuncInfo) callToNamedFunc(callee types.Object) { - switch o := callee.(type) { - case *types.Func: - if recv := o.Type().(*types.Signature).Recv(); recv != nil { - if _, ok := recv.Type().Underlying().(*types.Interface); ok { - // Conservatively assume that an interface implementation may be blocking. - fi.markBlocking(fi.visitorStack) - return - } - } - if o.Pkg() != fi.pkgInfo.Pkg { - if fi.pkgInfo.isImportedBlocking(o) { - fi.markBlocking(fi.visitorStack) - } - return - } - // We probably don't know yet whether the callee function is blocking. - // Record the calls site for the later stage. - fi.localNamedCallees[o] = append(fi.localNamedCallees[o], fi.visitorStack.copy()) - case *types.Var: - // Conservatively assume that a function in a variable might be blocking. 
- fi.markBlocking(fi.visitorStack) - } -} - -func (fi *FuncInfo) markBlocking(stack astPath) { - for _, n := range stack { - fi.Blocking[n] = true - fi.Flattened[n] = true - } -} - -func (fi *FuncInfo) markFlattened(stack astPath) { - for _, n := range stack { - fi.Flattened[n] = true - } -} diff --git a/compiler/analysis/info_test.go b/compiler/analysis/info_test.go deleted file mode 100644 index 723208255..000000000 --- a/compiler/analysis/info_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package analysis - -import ( - "go/ast" - "go/token" - "go/types" - "testing" - - "github.com/gopherjs/gopherjs/internal/srctesting" -) - -// See: https://github.com/gopherjs/gopherjs/issues/955. -func TestBlockingFunctionLiteral(t *testing.T) { - src := ` -package test - -func blocking() { - c := make(chan bool) - <-c -} - -func indirectlyBlocking() { - func() { blocking() }() -} - -func directlyBlocking() { - func() { - c := make(chan bool) - <-c - }() -} - -func notBlocking() { - func() { println() } () -} -` - fset := token.NewFileSet() - file := srctesting.Parse(t, fset, src) - typesInfo, typesPkg := srctesting.Check(t, fset, file) - - pkgInfo := AnalyzePkg([]*ast.File{file}, fset, typesInfo, typesPkg, func(f *types.Func) bool { - panic("isBlocking() should be never called for imported functions in this test.") - }) - - assertBlocking(t, file, pkgInfo, "blocking") - assertBlocking(t, file, pkgInfo, "indirectlyBlocking") - assertBlocking(t, file, pkgInfo, "directlyBlocking") - assertNotBlocking(t, file, pkgInfo, "notBlocking") -} - -func assertBlocking(t *testing.T, file *ast.File, pkgInfo *Info, funcName string) { - typesFunc := getTypesFunc(t, file, pkgInfo, funcName) - if !pkgInfo.IsBlocking(typesFunc) { - t.Errorf("Got: %q is not blocking. Want: %q is blocking.", typesFunc, typesFunc) - } -} - -func assertNotBlocking(t *testing.T, file *ast.File, pkgInfo *Info, funcName string) { - typesFunc := getTypesFunc(t, file, pkgInfo, funcName) - if pkgInfo.IsBlocking(typesFunc) { - t.Errorf("Got: %q is blocking. Want: %q is not blocking.", typesFunc, typesFunc) - } -} - -func getTypesFunc(t *testing.T, file *ast.File, pkgInfo *Info, funcName string) *types.Func { - obj := file.Scope.Lookup(funcName) - if obj == nil { - t.Fatalf("Declaration of %q is not found in the AST.", funcName) - } - decl, ok := obj.Decl.(*ast.FuncDecl) - if !ok { - t.Fatalf("Got: %q is %v. Want: a function declaration.", funcName, obj.Kind) - } - blockingType, ok := pkgInfo.Defs[decl.Name] - if !ok { - t.Fatalf("No type information is found for %v.", decl.Name) - } - return blockingType.(*types.Func) -} diff --git a/compiler/astutil/astutil.go b/compiler/astutil/astutil.go index 5cfe2dbd3..9ff88a48c 100644 --- a/compiler/astutil/astutil.go +++ b/compiler/astutil/astutil.go @@ -35,7 +35,15 @@ func NewIdent(name string, t types.Type, info *types.Info, pkg *types.Package) * return ident } +// IsTypeExpr returns true if expr denotes a type. This can be used to +// distinguish between calls and type conversions. func IsTypeExpr(expr ast.Expr, info *types.Info) bool { + // Note that we could've used info.Types[expr].IsType() instead of doing our + // own analysis. However, that creates a problem because we synthesize some + // *ast.CallExpr nodes and, more importantly, *ast.Ident nodes that denote a + // type. Unfortunately, because the flag that controls + // types.TypeAndValue.IsType() return value is unexported we wouldn't be able + // to set it correctly. Thus, we can't rely on IsType(). 
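+	// As a quick illustration of the cases handled below (not an exhaustive
+	// list): composite type literals such as `[]int` or `map[string]T`, an
+	// identifier or `pkg.Name` selector that resolves to a *types.TypeName, and
+	// a generic instantiation whose base resolves to a type name (e.g. the
+	// assumed `List[int]` or `Pair[K, V]`) are reported as type expressions,
+	// while an identifier bound to a function or a function-valued variable is
+	// not.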
switch e := expr.(type) { case *ast.ArrayType, *ast.ChanType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.StructType: return true @@ -47,6 +55,20 @@ func IsTypeExpr(expr ast.Expr, info *types.Info) bool { case *ast.SelectorExpr: _, ok := info.Uses[e.Sel].(*types.TypeName) return ok + case *ast.IndexExpr: + ident, ok := e.X.(*ast.Ident) + if !ok { + return false + } + _, ok = info.Uses[ident].(*types.TypeName) + return ok + case *ast.IndexListExpr: + ident, ok := e.X.(*ast.Ident) + if !ok { + return false + } + _, ok = info.Uses[ident].(*types.TypeName) + return ok case *ast.ParenExpr: return IsTypeExpr(e.X, info) default: diff --git a/compiler/astutil/astutil_test.go b/compiler/astutil/astutil_test.go index 28528a2b3..56dabc510 100644 --- a/compiler/astutil/astutil_test.go +++ b/compiler/astutil/astutil_test.go @@ -2,7 +2,6 @@ package astutil import ( "go/ast" - "go/token" "strconv" "testing" @@ -44,8 +43,7 @@ func TestImportsUnsafe(t *testing.T) { for _, test := range tests { t.Run(test.desc, func(t *testing.T) { src := "package testpackage\n\n" + test.imports - fset := token.NewFileSet() - file := srctesting.Parse(t, fset, src) + file := srctesting.New(t).Parse("test.go", src) got := ImportsUnsafe(file) if got != test.want { t.Fatalf("ImportsUnsafe() returned %t, want %t", got, test.want) @@ -81,8 +79,7 @@ func TestImportName(t *testing.T) { for _, test := range tests { t.Run(test.desc, func(t *testing.T) { src := "package testpackage\n\n" + test.src - fset := token.NewFileSet() - file := srctesting.Parse(t, fset, src) + file := srctesting.New(t).Parse("test.go", src) if len(file.Imports) != 1 { t.Fatal(`expected one and only one import`) } @@ -399,8 +396,7 @@ func TestHasDirectiveOnFile(t *testing.T) { for _, test := range tests { t.Run(test.desc, func(t *testing.T) { const action = `do-stuff` - fset := token.NewFileSet() - file := srctesting.Parse(t, fset, test.src) + file := srctesting.New(t).Parse("test.go", test.src) if got := hasDirective(file, action); got != test.want { t.Errorf(`hasDirective(%T, %q) returned %t, want %t`, file, action, got, test.want) } diff --git a/compiler/compiler.go b/compiler/compiler.go index 0588a923c..e8264c946 100644 --- a/compiler/compiler.go +++ b/compiler/compiler.go @@ -17,6 +17,8 @@ import ( "strings" "time" + "github.com/gopherjs/gopherjs/compiler/internal/dce" + "github.com/gopherjs/gopherjs/compiler/linkname" "github.com/gopherjs/gopherjs/compiler/prelude" "golang.org/x/tools/go/gcexportdata" ) @@ -32,22 +34,6 @@ func init() { } } -type ErrorList []error - -func (err ErrorList) Error() string { - if len(err) == 0 { - return "" - } - return fmt.Sprintf("%s (and %d more errors)", err[0].Error(), len(err[1:])) -} - -func (err ErrorList) Normalize() error { - if len(err) == 0 { - return nil - } - return err -} - // Archive contains intermediate build outputs of a single package. // // This is a logical equivalent of an object file in traditional compilers. @@ -61,91 +47,26 @@ type Archive struct { // A list of full package import paths that the current package imports across // all source files. See go/types.Package.Imports(). Imports []string - // Serialized contents of go/types.Package in a binary format. This information - // is used by the compiler to type-check packages that import this one. See - // gcexportdata.Write(). - // - // TODO(nevkontakte): It would be more convenient to store go/types.Package - // itself and only serialize it when writing the archive onto disk. 
- ExportData []byte + // The package information is used by the compiler to type-check packages + // that import this one. See [gcexportdata.Write]. + Package *types.Package // Compiled package-level symbols. Declarations []*Decl // Concatenated contents of all raw .inc.js of the package. IncJSCode []byte - // JSON-serialized contents of go/token.FileSet. This is used to obtain source - // code locations for various symbols (e.g. for sourcemap generation). See - // token.FileSet.Write(). - // - // TODO(nevkontakte): This is also more convenient to store as the original - // object and only serialize before writing onto disk. - FileSet []byte + // The file set containing the source code locations for various symbols + // (e.g. for sourcemap generation). See [token.FileSet.Write]. + FileSet *token.FileSet // Whether or not the package was compiled with minification enabled. Minified bool // A list of go:linkname directives encountered in the package. - GoLinknames []GoLinkname - // Time when this archive was built. - BuildTime time.Time + GoLinknames []linkname.GoLinkname } func (a Archive) String() string { return fmt.Sprintf("compiler.Archive{%s}", a.ImportPath) } -// RegisterTypes adds package type information from the archive into the provided map. -func (a *Archive) RegisterTypes(packages map[string]*types.Package) error { - var err error - // TODO(nevkontakte): Should this be shared throughout the build? - fset := token.NewFileSet() - packages[a.ImportPath], err = gcexportdata.Read(bytes.NewReader(a.ExportData), fset, packages, a.ImportPath) - return err -} - -// Decl represents a package-level symbol (e.g. a function, variable or type). -// -// It contains code generated by the compiler for this specific symbol, which is -// grouped by the execution stage it belongs to in the JavaScript runtime. -type Decl struct { - // The package- or receiver-type-qualified name of function or method obj. - // See go/types.Func.FullName(). - FullName string - // A logical equivalent of a symbol name in an object file in the traditional - // Go compiler/linker toolchain. Used by GopherJS to support go:linkname - // directives. Must be set for decls that are supported by go:linkname - // implementation. - LinkingName SymName - // A list of package-level JavaScript variable names this symbol needs to declare. - Vars []string - // NamedRecvType is method named recv declare. - NamedRecvType string - // JavaScript code that declares basic information about a symbol. For a type - // it configures basic information about the type and its identity. For a function - // or method it contains its compiled body. - DeclCode []byte - // JavaScript code that initializes reflection metadata about type's method list. - MethodListCode []byte - // JavaScript code that initializes the rest of reflection metadata about a type - // (e.g. struct fields, array type sizes, element types, etc.). - TypeInitCode []byte - // JavaScript code that needs to be executed during the package init phase to - // set the symbol up (e.g. initialize package-level variable value). - InitCode []byte - // Symbol's identifier used by the dead-code elimination logic, not including - // package path. If empty, the symbol is assumed to be alive and will not be - // eliminated. For methods it is the same as its receiver type identifier. - DceObjectFilter string - // The second part of the identified used by dead-code elimination for methods. - // Empty for other types of symbols. 
- DceMethodFilter string - // List of fully qualified (including package path) DCE symbol identifiers the - // symbol depends on for dead code elimination purposes. - DceDeps []string - // Set to true if a function performs a blocking operation (I/O or - // synchronization). The compiler will have to generate function code such - // that it can be resumed after a blocking operation completes without - // blocking the main thread in the meantime. - Blocking bool -} - type Dependency struct { Pkg string Type string @@ -187,77 +108,31 @@ func ImportDependencies(archive *Archive, importPkg func(string) (*Archive, erro return deps, nil } -type dceInfo struct { - decl *Decl - objectFilter string - methodFilter string -} - func WriteProgramCode(pkgs []*Archive, w *SourceMapFilter, goVersion string) error { mainPkg := pkgs[len(pkgs)-1] minify := mainPkg.Minified // Aggregate all go:linkname directives in the program together. - gls := goLinknameSet{} + gls := linkname.GoLinknameSet{} for _, pkg := range pkgs { gls.Add(pkg.GoLinknames) } - byFilter := make(map[string][]*dceInfo) - var pendingDecls []*Decl // A queue of live decls to find other live decls. + sel := &dce.Selector[*Decl]{} for _, pkg := range pkgs { for _, d := range pkg.Declarations { - if d.DceObjectFilter == "" && d.DceMethodFilter == "" { - // This is an entry point (like main() or init() functions) or a variable - // initializer which has a side effect, consider it live. - pendingDecls = append(pendingDecls, d) - continue - } + implementsLink := false if gls.IsImplementation(d.LinkingName) { // If a decl is referenced by a go:linkname directive, we just assume // it's not dead. // TODO(nevkontakte): This is a safe, but imprecise assumption. We should // try and trace whether the referencing functions are actually live. - pendingDecls = append(pendingDecls, d) - } - info := &dceInfo{decl: d} - if d.DceObjectFilter != "" { - info.objectFilter = pkg.ImportPath + "." + d.DceObjectFilter - byFilter[info.objectFilter] = append(byFilter[info.objectFilter], info) - } - if d.DceMethodFilter != "" { - info.methodFilter = pkg.ImportPath + "." + d.DceMethodFilter - byFilter[info.methodFilter] = append(byFilter[info.methodFilter], info) - } - } - } - - dceSelection := make(map[*Decl]struct{}) // Known live decls. - for len(pendingDecls) != 0 { - d := pendingDecls[len(pendingDecls)-1] - pendingDecls = pendingDecls[:len(pendingDecls)-1] - - dceSelection[d] = struct{}{} // Mark the decl as live. - - // Consider all decls the current one is known to depend on and possible add - // them to the live queue. 
- for _, dep := range d.DceDeps { - if infos, ok := byFilter[dep]; ok { - delete(byFilter, dep) - for _, info := range infos { - if info.objectFilter == dep { - info.objectFilter = "" - } - if info.methodFilter == dep { - info.methodFilter = "" - } - if info.objectFilter == "" && info.methodFilter == "" { - pendingDecls = append(pendingDecls, info.decl) - } - } + implementsLink = true } + sel.Include(d, implementsLink) } } + dceSelection := sel.AliveDecls() if _, err := w.Write([]byte("\"use strict\";\n(function() {\n\n")); err != nil { return err @@ -290,12 +165,9 @@ func WriteProgramCode(pkgs []*Archive, w *SourceMapFilter, goVersion string) err return nil } -func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls goLinknameSet, minify bool, w *SourceMapFilter) error { +func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls linkname.GoLinknameSet, minify bool, w *SourceMapFilter) error { if w.MappingCallback != nil && pkg.FileSet != nil { - w.fileSet = token.NewFileSet() - if err := w.fileSet.Read(json.NewDecoder(bytes.NewReader(pkg.FileSet)).Decode); err != nil { - panic(err) - } + w.fileSet = pkg.FileSet } if _, err := w.Write(pkg.IncJSCode); err != nil { return err @@ -326,7 +198,7 @@ func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls goLinknameS if recv, method, ok := d.LinkingName.IsMethod(); ok { code = fmt.Sprintf("\t$linknames[%q] = $unsafeMethodToFunction(%v,%q,%t);\n", d.LinkingName.String(), d.NamedRecvType, method, strings.HasPrefix(recv, "*")) } else { - code = fmt.Sprintf("\t$linknames[%q] = %s;\n", d.LinkingName.String(), d.Vars[0]) + code = fmt.Sprintf("\t$linknames[%q] = %s;\n", d.LinkingName.String(), d.RefExpr) } if _, err := w.Write(removeWhitespace([]byte(code), minify)); err != nil { return err @@ -357,7 +229,7 @@ func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls goLinknameS if !found { continue // The symbol is not affected by a go:linkname directive. } - lines = append(lines, fmt.Sprintf("\t\t%s = $linknames[%q];\n", d.Vars[0], impl.String())) + lines = append(lines, fmt.Sprintf("\t\t%s = $linknames[%q];\n", d.RefExpr, impl.String())) } if len(lines) > 0 { code := fmt.Sprintf("\t$pkg.$initLinknames = function() {\n%s};\n", strings.Join(lines, "")) @@ -384,19 +256,98 @@ func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls goLinknameS return nil } +type serializableArchive struct { + ImportPath string + Name string + Imports []string + ExportData []byte + Declarations []*Decl + IncJSCode []byte + FileSet []byte + Minified bool + GoLinknames []linkname.GoLinkname + BuildTime time.Time +} + // ReadArchive reads serialized compiled archive of the importPath package. -func ReadArchive(path string, r io.Reader) (*Archive, error) { +// +// The given srcModTime is used to determine if the archive is out-of-date. +// If the archive is out-of-date, the returned archive is nil. +// If there was not an error, the returned time is when the archive was built. +// +// The imports map is used to resolve package dependencies and may modify the +// map to include the package from the read archive. See [gcexportdata.Read]. +func ReadArchive(importPath string, r io.Reader, srcModTime time.Time, imports map[string]*types.Package) (*Archive, time.Time, error) { + var sa serializableArchive + if err := gob.NewDecoder(r).Decode(&sa); err != nil { + return nil, time.Time{}, err + } + + if srcModTime.After(sa.BuildTime) { + // Archive is out-of-date. 
+ return nil, sa.BuildTime, nil + } + var a Archive - if err := gob.NewDecoder(r).Decode(&a); err != nil { - return nil, err + fset := token.NewFileSet() + if len(sa.ExportData) > 0 { + pkg, err := gcexportdata.Read(bytes.NewReader(sa.ExportData), fset, imports, importPath) + if err != nil { + return nil, sa.BuildTime, err + } + a.Package = pkg } - return &a, nil + if len(sa.FileSet) > 0 { + a.FileSet = token.NewFileSet() + if err := a.FileSet.Read(json.NewDecoder(bytes.NewReader(sa.FileSet)).Decode); err != nil { + return nil, sa.BuildTime, err + } + } + + a.ImportPath = sa.ImportPath + a.Name = sa.Name + a.Imports = sa.Imports + a.Declarations = sa.Declarations + a.IncJSCode = sa.IncJSCode + a.Minified = sa.Minified + a.GoLinknames = sa.GoLinknames + return &a, sa.BuildTime, nil } // WriteArchive writes compiled package archive on disk for later reuse. -func WriteArchive(a *Archive, w io.Writer) error { - return gob.NewEncoder(w).Encode(a) +// +// The passed in buildTime is used to determine if the archive is out-of-date. +// Typically it should be set to the srcModTime or time.Now() but it is exposed for testing purposes. +func WriteArchive(a *Archive, buildTime time.Time, w io.Writer) error { + exportData := new(bytes.Buffer) + if a.Package != nil { + if err := gcexportdata.Write(exportData, nil, a.Package); err != nil { + return fmt.Errorf("failed to write export data: %w", err) + } + } + + encodedFileSet := new(bytes.Buffer) + if a.FileSet != nil { + if err := a.FileSet.Write(json.NewEncoder(encodedFileSet).Encode); err != nil { + return err + } + } + + sa := serializableArchive{ + ImportPath: a.ImportPath, + Name: a.Name, + Imports: a.Imports, + ExportData: exportData.Bytes(), + Declarations: a.Declarations, + IncJSCode: a.IncJSCode, + FileSet: encodedFileSet.Bytes(), + Minified: a.Minified, + GoLinknames: a.GoLinknames, + BuildTime: buildTime, + } + + return gob.NewEncoder(w).Encode(sa) } type SourceMapFilter struct { diff --git a/compiler/compiler_test.go b/compiler/compiler_test.go index 377f09d94..88d8e525e 100644 --- a/compiler/compiler_test.go +++ b/compiler/compiler_test.go @@ -2,145 +2,1178 @@ package compiler import ( "bytes" - "go/ast" - "go/build" - "go/parser" - "go/token" "go/types" + "regexp" + "sort" + "strings" "testing" + "time" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" + + "github.com/gopherjs/gopherjs/compiler/internal/dce" + "github.com/gopherjs/gopherjs/compiler/linkname" + "github.com/gopherjs/gopherjs/compiler/sources" + "github.com/gopherjs/gopherjs/internal/srctesting" ) func TestOrder(t *testing.T) { fileA := ` -package foo + package foo -var Avar = "a" + var Avar = "a" -type Atype struct{} + type Atype struct{} -func Afunc() int { - var varA = 1 - var varB = 2 - return varA+varB -} -` + func Afunc() int { + var varA = 1 + var varB = 2 + return varA+varB + }` fileB := ` -package foo + package foo + + var Bvar = "b" -var Bvar = "b" + type Btype struct{} -type Btype struct{} + func Bfunc() int { + var varA = 1 + var varB = 2 + return varA+varB + }` + + files := []srctesting.Source{ + {Name: "fileA.go", Contents: []byte(fileA)}, + {Name: "fileB.go", Contents: []byte(fileB)}, + } -func Bfunc() int { - var varA = 1 - var varB = 2 - return varA+varB + compareOrder(t, files, false) + compareOrder(t, files, true) } -` - files := []source{{"fileA.go", []byte(fileA)}, {"fileB.go", []byte(fileB)}} - compare(t, "foo", files, false) - compare(t, "foo", files, true) +func 
TestDeclSelection_KeepUnusedExportedMethods(t *testing.T) { + src := ` + package main + type Foo struct {} + func (f Foo) Bar() { + println("bar") + } + func (f Foo) Baz() { // unused + println("baz") + } + func main() { + Foo{}.Bar() + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + sel.IsAlive(`func:command-line-arguments.Foo.Baz`) } -func compare(t *testing.T, path string, sourceFiles []source, minify bool) { - outputNormal, err := compile(path, sourceFiles, minify) - if err != nil { - t.Fatal(err) +func TestDeclSelection_RemoveUnusedUnexportedMethods(t *testing.T) { + src := ` + package main + type Foo struct {} + func (f Foo) Bar() { + println("bar") + } + func (f Foo) baz() { // unused + println("baz") + } + func main() { + Foo{}.Bar() + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + + sel.IsDead(`func:command-line-arguments.Foo.baz`) +} + +func TestDeclSelection_KeepUnusedUnexportedMethodForInterface(t *testing.T) { + src := ` + package main + type Foo struct {} + func (f Foo) Bar() { + println("foo") + } + func (f Foo) baz() {} // unused + + type Foo2 struct {} + func (f Foo2) Bar() { + println("foo2") + } + + type IFoo interface { + Bar() + baz() + } + func main() { + fs := []any{ Foo{}, Foo2{} } + for _, f := range fs { + if i, ok := f.(IFoo); ok { + i.Bar() + } + } + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + + // `baz` signature metadata is used to check a type assertion against IFoo, + // but the method itself is never called, so it can be removed. + // The method is kept in Foo's MethodList for type checking. 
+ sel.IsDead(`func:command-line-arguments.Foo.baz`) +} + +func TestDeclSelection_KeepUnexportedMethodUsedViaInterfaceLit(t *testing.T) { + src := ` + package main + type Foo struct {} + func (f Foo) Bar() { + println("foo") + } + func (f Foo) baz() { + println("baz") + } + func main() { + var f interface { + Bar() + baz() + } = Foo{} + f.baz() + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + sel.IsAlive(`func:command-line-arguments.Foo.baz`) +} + +func TestDeclSelection_KeepAliveUnexportedMethodsUsedInMethodExpressions(t *testing.T) { + src := ` + package main + type Foo struct {} + func (f Foo) baz() { + println("baz") + } + func main() { + fb := Foo.baz + fb(Foo{}) + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.baz`) +} + +func TestDeclSelection_RemoveUnusedFuncInstance(t *testing.T) { + src := ` + package main + func Sum[T int | float64](values ...T) T { + var sum T + for _, v := range values { + sum += v + } + return sum + } + func Foo() { // unused + println(Sum(1, 2, 3)) + } + func main() { + println(Sum(1.1, 2.2, 3.3)) + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`func:command-line-arguments.Sum`) + sel.IsAlive(`anonType:command-line-arguments.sliceType$1`) // []float64 + + sel.IsDead(`func:command-line-arguments.Foo`) + sel.IsDead(`anonType:command-line-arguments.sliceType`) // []int + sel.IsDead(`func:command-line-arguments.Sum`) +} + +func TestDeclSelection_RemoveUnusedStructTypeInstances(t *testing.T) { + src := ` + package main + type Foo[T any] struct { v T } + func (f Foo[T]) Bar() { + println(f.v) + } + + var _ = Foo[float64]{v: 3.14} // unused + + func main() { + Foo[int]{v: 7}.Bar() + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + + sel.IsDead(`type:command-line-arguments.Foo`) + sel.IsDead(`func:command-line-arguments.Foo.Bar`) +} + +func TestDeclSelection_RemoveUnusedInterfaceTypeInstances(t *testing.T) { + src := ` + package main + type Foo[T any] interface { Bar(v T) } + + type Baz int + func (b Baz) Bar(v int) { + println(v + int(b)) + } + + var F64 = FooBar[float64] // unused + + func FooBar[T any](f Foo[T], v T) { + f.Bar(v) + } + + func main() { + FooBar[int](Baz(42), 12) // Baz implements Foo[int] + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Baz`) + sel.IsAlive(`func:command-line-arguments.Baz.Bar`) + sel.IsDead(`var:command-line-arguments.F64`) + + sel.IsAlive(`func:command-line-arguments.FooBar`) + // The Foo[int] instance is defined as a parameter in FooBar[int] that is alive. + // However, Foo[int] isn't used directly in the code so it can be removed. + // JS will simply duck-type the Baz object to Foo[int] without Foo[int] specifically defined. 
+ sel.IsDead(`type:command-line-arguments.Foo`) + + sel.IsDead(`func:command-line-arguments.FooBar`) + sel.IsDead(`type:command-line-arguments.Foo`) +} + +func TestDeclSelection_RemoveUnusedMethodWithDifferentSignature(t *testing.T) { + src := ` + package main + type Foo struct{} + func (f Foo) Bar() { println("Foo") } + func (f Foo) baz(x int) { println(x) } // unused + + type Foo2 struct{} + func (f Foo2) Bar() { println("Foo2") } + func (f Foo2) baz(x string) { println(x) } + + func main() { + f1 := Foo{} + f1.Bar() + + f2 := Foo2{} + f2.Bar() + f2.baz("foo") + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + sel.IsDead(`func:command-line-arguments.Foo.baz`) + + sel.IsAlive(`type:command-line-arguments.Foo2`) + sel.IsAlive(`func:command-line-arguments.Foo2.Bar`) + sel.IsAlive(`func:command-line-arguments.Foo2.baz`) +} + +func TestDeclSelection_RemoveUnusedUnexportedMethodInstance(t *testing.T) { + src := ` + package main + type Foo[T any] struct{} + func (f Foo[T]) Bar() { println("Foo") } + func (f Foo[T]) baz(x T) { Baz[T]{v: x}.Bar() } + + type Baz[T any] struct{ v T } + func (b Baz[T]) Bar() { println("Baz", b.v) } + + func main() { + f1 := Foo[int]{} + f1.Bar() + f1.baz(7) + + f2 := Foo[uint]{} // Foo[uint].baz is unused + f2.Bar() + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + sel.IsAlive(`func:command-line-arguments.Foo.baz`) + sel.IsAlive(`type:command-line-arguments.Baz`) + sel.IsAlive(`func:command-line-arguments.Baz.Bar`) + + sel.IsAlive(`type:command-line-arguments.Foo`) + sel.IsAlive(`func:command-line-arguments.Foo.Bar`) + + // All three below are dead because Foo[uint].baz is unused. + sel.IsDead(`func:command-line-arguments.Foo.baz`) + sel.IsDead(`type:command-line-arguments.Baz`) + sel.IsDead(`func:command-line-arguments.Baz.Bar`) +} + +func TestDeclSelection_RemoveUnusedTypeConstraint(t *testing.T) { + src := ` + package main + type Foo interface{ int | string } + + type Bar[T Foo] struct{ v T } + func (b Bar[T]) Baz() { println(b.v) } + + var ghost = Bar[int]{v: 7} // unused + + func main() { + println("do nothing") + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + sel := declSelection(t, srcFiles, nil) + + sel.IsDead(`type:command-line-arguments.Foo`) + sel.IsDead(`type:command-line-arguments.Bar`) + sel.IsDead(`func:command-line-arguments.Bar.Baz`) + sel.IsDead(`var:command-line-arguments.ghost`) +} + +func TestLengthParenthesizingIssue841(t *testing.T) { + // See issue https://github.com/gopherjs/gopherjs/issues/841 + // + // Summary: Given `len(a+b)` where a and b are strings being concatenated + // together, the result was `a + b.length` instead of `(a+b).length`. + // + // The fix was to check if the expression in `len` is a binary + // expression or not. If it is, then the expression is parenthesized. + // This will work for concatenations any combination of variables and + // literals but won't pick up `len(Foo(a+b))` or `len(a[0:i+3])`. 
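+	//
+	// A rough sketch of that rule (the helper below is hypothetical, not the
+	// compiler's actual code path): when emitting the JS for `len(arg)` on a
+	// string, the argument is wrapped in parentheses only when it is a binary
+	// expression.
+	//
+	//	func lenJS(arg ast.Expr, argJS string) string {
+	//		if _, ok := arg.(*ast.BinaryExpr); ok {
+	//			return "(" + argJS + ").length" // len(a+b) -> (a+b).length
+	//		}
+	//		return argJS + ".length" // len(ab) -> ab.length
+	//	}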
+ + src := ` + package main + + func main() { + a := "a" + b := "b" + ab := a + b + if len(a+b) != len(ab) { + panic("unreachable") + } + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + root := srctesting.ParseSources(t, srcFiles, nil) + archives := compileProject(t, root, false) + mainPkg := archives[root.PkgPath] + + badRegex := regexp.MustCompile(`a\s*\+\s*b\.length`) + goodRegex := regexp.MustCompile(`\(a\s*\+\s*b\)\.length`) + goodFound := false + for i, decl := range mainPkg.Declarations { + if badRegex.Match(decl.DeclCode) { + t.Errorf("found length issue in decl #%d: %s", i, decl.FullName) + t.Logf("decl code:\n%s", string(decl.DeclCode)) + } + if goodRegex.Match(decl.DeclCode) { + goodFound = true + } + } + if !goodFound { + t.Error("parenthesized length not found") + } +} + +func TestDeclNaming_Import(t *testing.T) { + src1 := ` + package main + + import ( + newt "github.com/gopherjs/gopherjs/compiler/jorden" + "github.com/gopherjs/gopherjs/compiler/burke" + "github.com/gopherjs/gopherjs/compiler/hudson" + ) + + func main() { + newt.Quote() + burke.Quote() + hudson.Quote() + }` + src2 := `package jorden + func Quote() { println("They mostly come at night... mostly") }` + src3 := `package burke + func Quote() { println("Busy little creatures, huh?") }` + src4 := `package hudson + func Quote() { println("Game over, man! Game over!") }` + + root := srctesting.ParseSources(t, + []srctesting.Source{ + {Name: `main.go`, Contents: []byte(src1)}, + }, + []srctesting.Source{ + {Name: `jorden/rebecca.go`, Contents: []byte(src2)}, + {Name: `burke/carter.go`, Contents: []byte(src3)}, + {Name: `hudson/william.go`, Contents: []byte(src4)}, + }) + + archives := compileProject(t, root, false) + checkForDeclFullNames(t, archives, + `import:github.com/gopherjs/gopherjs/compiler/burke`, + `import:github.com/gopherjs/gopherjs/compiler/hudson`, + `import:github.com/gopherjs/gopherjs/compiler/jorden`, + ) +} + +func TestDeclNaming_FuncAndFuncVar(t *testing.T) { + src := ` + package main + + func Avasarala(value int) { println("Chrisjen", value) } + + func Draper[T any](value T) { println("Bobbie", value) } + + type Nagata struct{ value int } + func (n Nagata) Print() { println("Naomi", n.value) } + + type Burton[T any] struct{ value T } + func (b Burton[T]) Print() { println("Amos", b.value) } + + func main() { + Avasarala(10) + Draper(11) + Draper("Babs") + Nagata{value: 12}.Print() + Burton[int]{value: 13}.Print() + Burton[string]{value: "Timothy"}.Print() + }` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + root := srctesting.ParseSources(t, srcFiles, nil) + archives := compileProject(t, root, false) + checkForDeclFullNames(t, archives, + `funcVar:command-line-arguments.Avasarala`, + `func:command-line-arguments.Avasarala`, + + `funcVar:command-line-arguments.Draper`, + `func:command-line-arguments.Draper`, + `func:command-line-arguments.Draper`, + + `func:command-line-arguments.Nagata.Print`, + + `typeVar:command-line-arguments.Burton`, + `type:command-line-arguments.Burton`, + `type:command-line-arguments.Burton`, + `func:command-line-arguments.Burton.Print`, + `func:command-line-arguments.Burton.Print`, + + `funcVar:command-line-arguments.main`, + `func:command-line-arguments.main`, + `init:main`, + ) +} + +func TestDeclNaming_InitsAndVars(t *testing.T) { + src1 := ` + package main + + import ( + _ "github.com/gopherjs/gopherjs/compiler/spengler" + _ "github.com/gopherjs/gopherjs/compiler/barrett" + _ 
"github.com/gopherjs/gopherjs/compiler/tully" + ) + + var peck = "Walter" + func init() { println(peck) } + + func main() { + println("Janosz Poha") + }` + src2 := `package spengler + func init() { println("Egon") } + var egie = func() { println("Dirt Farmer") } + func init() { egie() }` + src3 := `package barrett + func init() { println("Dana") }` + src4 := `package barrett + func init() { println("Zuul") }` + src5 := `package barrett + func init() { println("Gatekeeper") }` + src6 := `package tully + func init() { println("Louis") }` + src7 := `package tully + var keymaster = "Vinz Clortho" + func init() { println(keymaster) }` + + root := srctesting.ParseSources(t, + []srctesting.Source{ + {Name: `main.go`, Contents: []byte(src1)}, + }, + []srctesting.Source{ + {Name: `spengler/a.go`, Contents: []byte(src2)}, + {Name: `barrett/a.go`, Contents: []byte(src3)}, + {Name: `barrett/b.go`, Contents: []byte(src4)}, + {Name: `barrett/c.go`, Contents: []byte(src5)}, + {Name: `tully/a.go`, Contents: []byte(src6)}, + {Name: `tully/b.go`, Contents: []byte(src7)}, + }) + + archives := compileProject(t, root, false) + checkForDeclFullNames(t, archives, + // tully + `var:github.com/gopherjs/gopherjs/compiler/tully.keymaster`, + `funcVar:github.com/gopherjs/gopherjs/compiler/tully.init`, + `funcVar:github.com/gopherjs/gopherjs/compiler/tully.init`, + `func:github.com/gopherjs/gopherjs/compiler/tully.init`, + `func:github.com/gopherjs/gopherjs/compiler/tully.init`, + + // spangler + `var:github.com/gopherjs/gopherjs/compiler/spengler.egie`, + `funcVar:github.com/gopherjs/gopherjs/compiler/spengler.init`, + `funcVar:github.com/gopherjs/gopherjs/compiler/spengler.init`, + `func:github.com/gopherjs/gopherjs/compiler/spengler.init`, + `func:github.com/gopherjs/gopherjs/compiler/spengler.init`, + + // barrett + `funcVar:github.com/gopherjs/gopherjs/compiler/barrett.init`, + `funcVar:github.com/gopherjs/gopherjs/compiler/barrett.init`, + `funcVar:github.com/gopherjs/gopherjs/compiler/barrett.init`, + `func:github.com/gopherjs/gopherjs/compiler/barrett.init`, + `func:github.com/gopherjs/gopherjs/compiler/barrett.init`, + `func:github.com/gopherjs/gopherjs/compiler/barrett.init`, + + // main + `var:command-line-arguments.peck`, + `funcVar:command-line-arguments.init`, + `func:command-line-arguments.init`, + `funcVar:command-line-arguments.main`, + `func:command-line-arguments.main`, + `init:main`, + ) +} + +func TestDeclNaming_VarsAndTypes(t *testing.T) { + src := ` + package main + + var _, shawn, _ = func() (int, string, float64) { + return 1, "Vizzini", 3.14 + }() + + var _ = func() string { + return "Inigo Montoya" + }() + + var fezzik = struct{ value int }{value: 7} + var inigo = struct{ value string }{value: "Montoya"} + + type westley struct{ value string } + + func main() {}` + + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + root := srctesting.ParseSources(t, srcFiles, nil) + + archives := compileProject(t, root, false) + checkForDeclFullNames(t, archives, + `var:command-line-arguments.shawn`, + `var:blank`, + + `var:command-line-arguments.fezzik`, + `anonType:command-line-arguments.structType`, + + `var:command-line-arguments.inigo`, + `anonType:command-line-arguments.structType$1`, + + `typeVar:command-line-arguments.westley`, + `type:command-line-arguments.westley`, + ) +} + +func Test_CrossPackageAnalysis(t *testing.T) { + src1 := ` + package main + import "github.com/gopherjs/gopherjs/compiler/stable" + + func main() { + m := map[string]int{ + "one": 1, + "two": 2, + 
"three": 3, + } + stable.Print(m) + }` + src2 := ` + package collections + import "github.com/gopherjs/gopherjs/compiler/cmp" + + func Keys[K cmp.Ordered, V any, M ~map[K]V](m M) []K { + keys := make([]K, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + return keys + }` + src3 := ` + package collections + import "github.com/gopherjs/gopherjs/compiler/cmp" + + func Values[K cmp.Ordered, V any, M ~map[K]V](m M) []V { + values := make([]V, 0, len(m)) + for _, v := range m { + values = append(values, v) + } + return values + }` + src4 := ` + package sorts + import "github.com/gopherjs/gopherjs/compiler/cmp" + + func Pair[K cmp.Ordered, V any, SK ~[]K, SV ~[]V](k SK, v SV) { + Bubble(len(k), + func(i, j int) bool { return k[i] < k[j] }, + func(i, j int) { k[i], v[i], k[j], v[j] = k[j], v[j], k[i], v[i] }) + } + + func Bubble(length int, less func(i, j int) bool, swap func(i, j int)) { + for i := 0; i < length; i++ { + for j := i + 1; j < length; j++ { + if less(j, i) { + swap(i, j) + } + } + } + }` + src5 := ` + package stable + import ( + "github.com/gopherjs/gopherjs/compiler/collections" + "github.com/gopherjs/gopherjs/compiler/sorts" + "github.com/gopherjs/gopherjs/compiler/cmp" + ) + + func Print[K cmp.Ordered, V any, M ~map[K]V](m M) { + keys := collections.Keys(m) + values := collections.Values(m) + sorts.Pair(keys, values) + for i, k := range keys { + println(i, k, values[i]) + } + }` + src6 := ` + package cmp + type Ordered interface { ~int | ~uint | ~float64 | ~string }` + + root := srctesting.ParseSources(t, + []srctesting.Source{ + {Name: `main.go`, Contents: []byte(src1)}, + }, + []srctesting.Source{ + {Name: `collections/keys.go`, Contents: []byte(src2)}, + {Name: `collections/values.go`, Contents: []byte(src3)}, + {Name: `sorts/sorts.go`, Contents: []byte(src4)}, + {Name: `stable/print.go`, Contents: []byte(src5)}, + {Name: `cmp/ordered.go`, Contents: []byte(src6)}, + }) + + archives := compileProject(t, root, false) + checkForDeclFullNames(t, archives, + // collections + `funcVar:github.com/gopherjs/gopherjs/compiler/collections.Values`, + `func:github.com/gopherjs/gopherjs/compiler/collections.Values`, + `funcVar:github.com/gopherjs/gopherjs/compiler/collections.Keys`, + `func:github.com/gopherjs/gopherjs/compiler/collections.Keys`, + + // sorts + `funcVar:github.com/gopherjs/gopherjs/compiler/sorts.Pair`, + `func:github.com/gopherjs/gopherjs/compiler/sorts.Pair`, + `funcVar:github.com/gopherjs/gopherjs/compiler/sorts.Bubble`, + `func:github.com/gopherjs/gopherjs/compiler/sorts.Bubble`, + + // stable + `funcVar:github.com/gopherjs/gopherjs/compiler/stable.Print`, + `func:github.com/gopherjs/gopherjs/compiler/stable.Print`, + + // main + `init:main`, + ) +} + +func TestArchiveSelectionAfterSerialization(t *testing.T) { + src := ` + package main + type Foo interface{ int | string } + + type Bar[T Foo] struct{ v T } + func (b Bar[T]) Baz() { println(b.v) } + + var ghost = Bar[int]{v: 7} // unused + + func main() { + println("do nothing") + }` + srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}} + root := srctesting.ParseSources(t, srcFiles, nil) + rootPath := root.PkgPath + origArchives := compileProject(t, root, false) + readArchives := reloadCompiledProject(t, origArchives, rootPath) + + origJS := renderPackage(t, origArchives[rootPath], false) + readJS := renderPackage(t, readArchives[rootPath], false) + + if diff := cmp.Diff(origJS, readJS); diff != "" { + t.Errorf("the reloaded files produce different JS:\n%s", diff) } +} + +func 
TestNestedConcreteTypeInGenericFunc(t *testing.T) {
+	// This is a test of a type defined inside a generic function
+	// that uses the type parameter of the function as a field type.
+	// The `T` type is unique for each instance of `F`.
+	// The use of `A` as a field is to demonstrate the difference in the types;
+	// however, even if T had no fields, the type would still be different.
+	//
+	// Change `print(F[?]())` to `fmt.Printf("%T\n", F[?]())` for
+	// the Go playground to print the type of T in the different F instances.
+	// (I just didn't want this test to depend on `fmt` when it doesn't need to.)
+
+	src := `
+	package main
+	func F[A any]() any {
+		type T struct{
+			a A
+		}
+		return T{}
+	}
+	func main() {
+		type Int int
+		print(F[int]())
+		print(F[Int]())
+	}
+	`
+
+	srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}}
+	root := srctesting.ParseSources(t, srcFiles, nil)
+	archives := compileProject(t, root, false)
+	mainPkg := archives[root.PkgPath]
+	insts := collectDeclInstances(t, mainPkg)
+
+	exp := []string{
+		`F[int]`,
+		`F[main.Int]`,  // Go prints `F[main.Int·2]`
+		`T[int;]`,      // `T` from `F[int]` (Go prints `T[int]`)
+		`T[main.Int;]`, // `T` from `F[main.Int]` (Go prints `T[main.Int·2]`)
+	}
+	if diff := cmp.Diff(exp, insts); len(diff) > 0 {
+		t.Errorf("the instances of generics are different:\n%s", diff)
+	}
+}
+
+func TestNestedGenericTypeInGenericFunc(t *testing.T) {
+	// This is a subset of the type param nested test from the go repo.
+	// See https://github.com/golang/go/blob/go1.19.13/test/typeparam/nested.go
+	// The test is failing because nested types aren't being typed differently.
+	// For example, the type of `T[int]` below is different based on the `F[X]`
+	// instance for different `X` type parameters; hence Go prints the type as
+	// `T[X;int]` instead of `T[int]`.
+
+	src := `
+	package main
+	func F[A any]() any {
+		type T[B any] struct{
+			a A
+			b B
+		}
+		return T[int]{}
+	}
+	func main() {
+		type Int int
+		print(F[int]())
+		print(F[Int]())
+	}
+	`
+
+	srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}}
+	root := srctesting.ParseSources(t, srcFiles, nil)
+	archives := compileProject(t, root, false)
+	mainPkg := archives[root.PkgPath]
+	insts := collectDeclInstances(t, mainPkg)
+
+	exp := []string{
+		`F[int]`,
+		`F[main.Int]`,
+		`T[int; int]`,
+		`T[main.Int; int]`,
+	}
+	if diff := cmp.Diff(exp, insts); len(diff) > 0 {
+		t.Errorf("the instances of generics are different:\n%s", diff)
+	}
+}
+
+func TestNestedGenericTypeInGenericFuncWithSharedTArgs(t *testing.T) {
+	src := `
+	package main
+	func F[A any]() any {
+		type T[B any] struct {
+			b B
+		}
+		return T[A]{}
+	}
+	func main() {
+		type Int int
+		print(F[int]())
+		print(F[Int]())
+	}`
+
+	srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}}
+	root := srctesting.ParseSources(t, srcFiles, nil)
+	archives := compileProject(t, root, false)
+	mainPkg := archives[root.PkgPath]
+	insts := collectDeclInstances(t, mainPkg)
+
+	exp := []string{
+		`F[int]`,
+		`F[main.Int]`,
+		`T[int; int]`,
+		`T[main.Int; main.Int]`,
+		// Make sure that T[int;main.Int] and T[main.Int;int] aren't created.
+	}
+	if diff := cmp.Diff(exp, insts); len(diff) > 0 {
+		t.Errorf("the instances of generics are different:\n%s", diff)
+	}
+}
+
+func collectDeclInstances(t *testing.T, pkg *Archive) []string {
+	t.Helper()
+
+	// Regex to match strings like `Foo[42 /* bar */] =` and capture
+	// the name (`Foo`), the index (`42`), and the instance type (`bar`).
+ rex := regexp.MustCompile(`^\s*(\w+)\s*\[\s*(\d+)\s*\/\*(.+)\*\/\s*\]\s*\=`) + + // Collect all instances of generics (e.g. `Foo[bar] @ 2`) written to the decl code. + insts := []string{} + for _, decl := range pkg.Declarations { + if match := rex.FindAllStringSubmatch(string(decl.DeclCode), 1); len(match) > 0 { + instance := match[0][1] + `[` + strings.TrimSpace(match[0][3]) + `]` + instance = strings.ReplaceAll(instance, `command-line-arguments`, pkg.Name) + insts = append(insts, instance) + } + } + sort.Strings(insts) + return insts +} + +func compareOrder(t *testing.T, sourceFiles []srctesting.Source, minify bool) { + t.Helper() + outputNormal := compile(t, sourceFiles, minify) // reverse the array for i, j := 0, len(sourceFiles)-1; i < j; i, j = i+1, j-1 { sourceFiles[i], sourceFiles[j] = sourceFiles[j], sourceFiles[i] } - outputReversed, err := compile(path, sourceFiles, minify) - if err != nil { - t.Fatal(err) - } + outputReversed := compile(t, sourceFiles, minify) - if diff := cmp.Diff(string(outputNormal), string(outputReversed)); diff != "" { + if diff := cmp.Diff(outputNormal, outputReversed); diff != "" { t.Errorf("files in different order produce different JS:\n%s", diff) } } -type source struct { - name string - contents []byte +func compile(t *testing.T, sourceFiles []srctesting.Source, minify bool) string { + t.Helper() + rootPkg := srctesting.ParseSources(t, sourceFiles, nil) + archives := compileProject(t, rootPkg, minify) + + path := rootPkg.PkgPath + a, ok := archives[path] + if !ok { + t.Fatalf(`root package not found in archives: %s`, path) + } + + return renderPackage(t, a, minify) } -func compile(path string, sourceFiles []source, minify bool) ([]byte, error) { - conf := loader.Config{} - conf.Fset = token.NewFileSet() - conf.ParserMode = parser.ParseComments +// compileProject compiles the given root package and all packages imported by the root. +// This returns the compiled archives of all packages keyed by their import path. +func compileProject(t *testing.T, root *packages.Package, minify bool) map[string]*Archive { + t.Helper() + pkgMap := map[string]*packages.Package{} + packages.Visit([]*packages.Package{root}, nil, func(pkg *packages.Package) { + pkgMap[pkg.PkgPath] = pkg + }) + + allSrcs := map[string]*sources.Sources{} + for _, pkg := range pkgMap { + srcs := &sources.Sources{ + ImportPath: pkg.PkgPath, + Dir: ``, + Files: pkg.Syntax, + FileSet: pkg.Fset, + } + allSrcs[pkg.PkgPath] = srcs + } - context := build.Default // make a copy of build.Default - conf.Build = &context - conf.Build.BuildTags = []string{"js"} + importer := func(path, srcDir string) (*sources.Sources, error) { + srcs, ok := allSrcs[path] + if !ok { + t.Fatal(`package not found:`, path) + return nil, nil + } + return srcs, nil + } + + tContext := types.NewContext() + sortedSources := make([]*sources.Sources, 0, len(allSrcs)) + for _, srcs := range allSrcs { + sortedSources = append(sortedSources, srcs) + } + sources.SortedSourcesSlice(sortedSources) + PrepareAllSources(sortedSources, importer, tContext) - var astFiles []*ast.File - for _, sourceFile := range sourceFiles { - astFile, err := parser.ParseFile(conf.Fset, sourceFile.name, sourceFile.contents, parser.ParseComments) + archives := map[string]*Archive{} + for _, srcs := range allSrcs { + a, err := Compile(srcs, tContext, minify) if err != nil { - return nil, err + t.Fatal(`failed to compile:`, err) } - astFiles = append(astFiles, astFile) + archives[srcs.ImportPath] = a } - conf.CreateFromFiles(path, astFiles...) 
-	prog, err := conf.Load()
-	if err != nil {
-		return nil, err
+	return archives
+}
+
+// newTime creates an arbitrary time.Time offset by the given number of seconds.
+// This is useful for quickly creating times that are before or after another.
+func newTime(seconds float64) time.Time {
+	return time.Date(1969, 7, 20, 20, 17, 0, 0, time.UTC).
+		Add(time.Duration(seconds * float64(time.Second)))
+}
+
+// reloadCompiledProject persists the given archives into memory then reloads
+// them from memory to simulate a cache reload of a precompiled project.
+func reloadCompiledProject(t *testing.T, archives map[string]*Archive, rootPkgPath string) map[string]*Archive {
+	t.Helper()
+
+	// TODO(grantnelson-wf): The tests using this function are out-of-date
+	// since they are testing the old archive caching that has been disabled.
+	// At some point, these tests should be updated to test any new caching
+	// mechanism that is implemented, or be removed. As is, this function fakes
+	// the old recursive archive loading that is no longer used since it
+	// doesn't allow cross-package analysis for generics.
+
+	buildTime := newTime(5.0)
+	serialized := map[string][]byte{}
+	for path, a := range archives {
+		buf := &bytes.Buffer{}
+		if err := WriteArchive(a, buildTime, buf); err != nil {
+			t.Fatalf(`failed to write archive for %s: %v`, path, err)
+		}
+		serialized[path] = buf.Bytes()
 	}
 
-	archiveCache := map[string]*Archive{}
+	srcModTime := newTime(0.0)
+	reloadCache := map[string]*Archive{}
+	type ImportContext struct {
+		Packages      map[string]*types.Package
+		ImportArchive func(path string) (*Archive, error)
+	}
 	var importContext *ImportContext
 	importContext = &ImportContext{
-		Packages: make(map[string]*types.Package),
-		Import: func(path string) (*Archive, error) {
+		Packages: map[string]*types.Package{},
+		ImportArchive: func(path string) (*Archive, error) {
 			// find in local cache
-			if a, ok := archiveCache[path]; ok {
+			if a, ok := reloadCache[path]; ok {
 				return a, nil
 			}
-			pi := prog.Package(path)
-			importContext.Packages[path] = pi.Pkg
-
-			// compile package
-			a, err := Compile(path, pi.Files, prog.Fset, importContext, minify)
+			// deserialize archive
+			buf, ok := serialized[path]
+			if !ok {
+				t.Fatalf(`archive not found for %s`, path)
+			}
+			a, _, err := ReadArchive(path, bytes.NewReader(buf), srcModTime, importContext.Packages)
 			if err != nil {
-				return nil, err
+				t.Fatalf(`failed to read archive for %s: %v`, path, err)
 			}
-			archiveCache[path] = a
+			reloadCache[path] = a
 			return a, nil
 		},
 	}
 
-	a, err := importContext.Import(path)
-	if err != nil {
-		return nil, err
-	}
-	b, err := renderPackage(a)
+	_, err := importContext.ImportArchive(rootPkgPath)
 	if err != nil {
-		return nil, err
+		t.Fatal(`failed to reload archives:`, err)
 	}
-	return b, nil
+	return reloadCache
 }
 
-func renderPackage(archive *Archive) ([]byte, error) {
-	selection := make(map[*Decl]struct{})
+func renderPackage(t *testing.T, archive *Archive, minify bool) string {
+	t.Helper()
+
+	sel := &dce.Selector[*Decl]{}
 	for _, d := range archive.Declarations {
-		selection[d] = struct{}{}
+		sel.Include(d, false)
 	}
+	selection := sel.AliveDecls()
 
 	buf := &bytes.Buffer{}
-	if err := WritePkgCode(archive, selection, goLinknameSet{}, false, &SourceMapFilter{Writer: buf}); err != nil {
-		return nil, err
+	if err := WritePkgCode(archive, selection, linkname.GoLinknameSet{}, minify, &SourceMapFilter{Writer: buf}); err != nil {
+		t.Fatal(err)
+	}
+
+	b := buf.String()
+	if len(b) == 0 {
+		t.Fatal(`render package had no output`)
+	}
+	return b
+}
+
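+// exampleArchiveRoundTrip is an illustrative sketch only (it is not used by
+// the tests above) of the WriteArchive/ReadArchive contract exercised by
+// reloadCompiledProject: an archive written with a given build time is
+// returned as nil by ReadArchive when the source modification time is newer,
+// signalling that the package has to be recompiled.
+func exampleArchiveRoundTrip(a *Archive, srcModTime time.Time, imports map[string]*types.Package) (*Archive, error) {
+	buf := &bytes.Buffer{}
+	if err := WriteArchive(a, time.Now(), buf); err != nil {
+		return nil, err
+	}
+	// cached is nil (with no error) when srcModTime is after the stored build time.
+	cached, _, err := ReadArchive(a.ImportPath, buf, srcModTime, imports)
+	return cached, err
+}
+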
+type selectionTester struct { + t *testing.T + mainPkg *Archive + archives map[string]*Archive + packages []*Archive + dceSelection map[*Decl]struct{} +} + +func declSelection(t *testing.T, sourceFiles []srctesting.Source, auxFiles []srctesting.Source) *selectionTester { + t.Helper() + root := srctesting.ParseSources(t, sourceFiles, auxFiles) + archives := compileProject(t, root, false) + mainPkg := archives[root.PkgPath] + + paths := make([]string, 0, len(archives)) + for path := range archives { + paths = append(paths, path) + } + sort.Strings(paths) + packages := make([]*Archive, 0, len(archives)) + for _, path := range paths { + packages = append(packages, archives[path]) + } + + sel := &dce.Selector[*Decl]{} + for _, pkg := range packages { + for _, d := range pkg.Declarations { + sel.Include(d, false) + } + } + dceSelection := sel.AliveDecls() + + return &selectionTester{ + t: t, + mainPkg: mainPkg, + archives: archives, + packages: packages, + dceSelection: dceSelection, } +} + +func (st *selectionTester) PrintDeclStatus() { + st.t.Helper() + for _, pkg := range st.packages { + st.t.Logf(`Package %s`, pkg.ImportPath) + for _, decl := range pkg.Declarations { + if _, ok := st.dceSelection[decl]; ok { + st.t.Logf(` [Alive] %q`, decl.FullName) + } else { + st.t.Logf(` [Dead] %q`, decl.FullName) + } + } + } +} + +func (st *selectionTester) IsAlive(declFullName string) { + st.t.Helper() + decl := st.FindDecl(declFullName) + if _, ok := st.dceSelection[decl]; !ok { + st.t.Error(`expected the decl to be alive:`, declFullName) + } +} + +func (st *selectionTester) IsDead(declFullName string) { + st.t.Helper() + decl := st.FindDecl(declFullName) + if _, ok := st.dceSelection[decl]; ok { + st.t.Error(`expected the decl to be dead:`, declFullName) + } +} - return buf.Bytes(), nil +func (st *selectionTester) FindDecl(declFullName string) *Decl { + st.t.Helper() + var found *Decl + for _, pkg := range st.packages { + for _, d := range pkg.Declarations { + if d.FullName == declFullName { + if found != nil { + st.t.Fatal(`multiple decls found with the name`, declFullName) + } + found = d + } + } + } + if found == nil { + st.t.Fatal(`no decl found by the name`, declFullName) + } + return found +} + +func checkForDeclFullNames(t *testing.T, archives map[string]*Archive, expectedFullNames ...string) { + t.Helper() + + expected := map[string]int{} + counts := map[string]int{} + for _, name := range expectedFullNames { + expected[name]++ + counts[name]++ + } + for _, pkg := range archives { + for _, decl := range pkg.Declarations { + if found, has := expected[decl.FullName]; has { + if found <= 0 { + t.Errorf(`decl name existed more than %d time(s): %q`, counts[decl.FullName], decl.FullName) + } else { + expected[decl.FullName]-- + } + } + } + } + for imp, found := range expected { + if found > 0 { + t.Errorf(`missing %d decl name(s): %q`, found, imp) + } + } + if t.Failed() { + t.Log("Declarations:") + for pkgName, pkg := range archives { + t.Logf("\t%q", pkgName) + for i, decl := range pkg.Declarations { + t.Logf("\t\t%d:\t%q", i, decl.FullName) + } + } + } } diff --git a/compiler/declNames.go b/compiler/declNames.go new file mode 100644 index 000000000..4ba59e289 --- /dev/null +++ b/compiler/declNames.go @@ -0,0 +1,70 @@ +package compiler + +import ( + "go/types" + + "github.com/gopherjs/gopherjs/compiler/internal/symbol" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" +) + +// importDeclFullName returns a unique name for an import declaration. 
+// This import name may be duplicated in different packages if they both
+// import the same package; import names are only unique within a package.
+func importDeclFullName(importedPkg *types.Package) string {
+	return `import:` + importedPkg.Path()
+}
+
+// varDeclFullName returns a name for a package-level variable declaration.
+// This var name only references the first named variable in an assignment.
+// If no variables are named, the name is `var:blank` and not unique.
+func varDeclFullName(init *types.Initializer) string {
+	for _, lhs := range init.Lhs {
+		if lhs.Name() != `_` {
+			return `var:` + symbol.New(lhs).String()
+		}
+	}
+	return `var:blank`
+}
+
+// funcVarDeclFullName returns a name for a package-level variable
+// that is used for a function (without a receiver) declaration.
+// The name is unique unless the function is an `init` function.
+// If the function is generic, this declaration name is also for the list
+// of instantiations of the function.
+func funcVarDeclFullName(o *types.Func) string {
+	return `funcVar:` + symbol.New(o).String()
+}
+
+// mainFuncDeclFullName returns the name for the declaration used to invoke the
+// main function of the program. There should only be one decl with this name.
+func mainFuncDeclFullName() string {
+	return `init:main`
+}
+
+// funcDeclFullName returns a name for a package-level function
+// declaration for the given instance of a function.
+// The name is unique unless the function is an `init` function.
+func funcDeclFullName(inst typeparams.Instance) string {
+	return `func:` + inst.String()
+}
+
+// typeVarDeclFullName returns a unique name for a package-level variable
+// that is used for a named type declaration.
+// If the type is generic, this declaration name is also for the list
+// of instantiations of the type.
+func typeVarDeclFullName(o *types.TypeName) string {
+	return `typeVar:` + symbol.New(o).String()
+}
+
+// typeDeclFullName returns a unique name for a package-level type declaration
+// for the given instance of a type. Names are only unique per package.
+func typeDeclFullName(inst typeparams.Instance) string {
+	return `type:` + inst.String()
+}
+
+// anonTypeDeclFullName returns a unique name for a package-level type
+// declaration for an anonymous type. Names are only unique per package.
+// These names are generated for types that are not named in the source code.
+func anonTypeDeclFullName(o types.Object) string {
+	return `anonType:` + symbol.New(o).String()
+}
diff --git a/compiler/decls.go b/compiler/decls.go
new file mode 100644
index 000000000..eb95cd2f7
--- /dev/null
+++ b/compiler/decls.go
@@ -0,0 +1,614 @@
+package compiler
+
+// decls.go contains logic responsible for compiling top-level declarations,
+// such as imports, types, functions, etc.
+
+import (
+	"fmt"
+	"go/ast"
+	"go/constant"
+	"go/token"
+	"go/types"
+	"sort"
+	"strings"
+
+	"github.com/gopherjs/gopherjs/compiler/internal/analysis"
+	"github.com/gopherjs/gopherjs/compiler/internal/dce"
+	"github.com/gopherjs/gopherjs/compiler/internal/symbol"
+	"github.com/gopherjs/gopherjs/compiler/internal/typeparams"
+	"github.com/gopherjs/gopherjs/compiler/sources"
+	"github.com/gopherjs/gopherjs/compiler/typesutil"
+)
+
+// Decl represents a package-level symbol (e.g. a function, variable or type).
+//
+// It contains code generated by the compiler for this specific symbol, which is
+// grouped by the execution stage it belongs to in the JavaScript runtime.
+//
+// When adding new fields to this struct, make sure the field is exported
+// so that it is serialized correctly by gob for the archive cache.
+type Decl struct {
+	// The package- or receiver-type-qualified name of function or method obj.
+	// See go/types.Func.FullName().
+	FullName string
+	// A logical equivalent of a symbol name in an object file in the traditional
+	// Go compiler/linker toolchain. Used by GopherJS to support go:linkname
+	// directives. Must be set for decls that are supported by the go:linkname
+	// implementation.
+	LinkingName symbol.Name
+	// A list of package-level JavaScript variable names this symbol needs to declare.
+	Vars []string
+	// A JS expression by which the object represented by this decl may be
+	// referenced within the package context. Empty if the decl represents no such
+	// object.
+	RefExpr string
+	// NamedRecvType is the name of the receiver type when this decl describes a
+	// method; it is empty for standalone functions.
+	NamedRecvType string
+	// JavaScript code that declares basic information about a symbol. For a type
+	// it configures basic information about the type and its identity. For a function
+	// or method it contains its compiled body.
+	DeclCode []byte
+	// JavaScript code that initializes reflection metadata about the type's method list.
+	MethodListCode []byte
+	// JavaScript code that initializes the rest of reflection metadata about a type
+	// (e.g. struct fields, array type sizes, element types, etc.).
+	TypeInitCode []byte
+	// JavaScript code that needs to be executed during the package init phase to
+	// set the symbol up (e.g. initialize package-level variable value).
+	InitCode []byte
+	// DCEInfo stores the information for dead-code elimination.
+	DCEInfo dce.Info
+	// Set to true if a function performs a blocking operation (I/O or
+	// synchronization). The compiler will have to generate function code such
+	// that it can be resumed after a blocking operation completes without
+	// blocking the main thread in the meantime.
+	Blocking bool
+}
+
+// minify returns a copy of Decl with unnecessary whitespace removed from the
+// JS code.
+func (d Decl) minify() Decl {
+	d.DeclCode = removeWhitespace(d.DeclCode, true)
+	d.MethodListCode = removeWhitespace(d.MethodListCode, true)
+	d.TypeInitCode = removeWhitespace(d.TypeInitCode, true)
+	d.InitCode = removeWhitespace(d.InitCode, true)
+	return d
+}
+
+// Dce gets the information for dead-code elimination.
+func (d *Decl) Dce() *dce.Info {
+	return &d.DCEInfo
+}
+
+// topLevelObjects extracts package-level variables, functions and named types
+// from the package AST.
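As a hedged sketch of how the per-stage code sections of a Decl fit together, a consumer could concatenate them stage by stage, roughly as below; concatDeclSections is hypothetical and not part of the compiler, which has its own writer for this.

// concatDeclSections illustrates the execution-stage grouping of Decl code:
// symbol declarations first, then method-list metadata, then the remaining
// type initialization, and finally the package init phase code.
func concatDeclSections(decls []*Decl) []byte {
	var out []byte
	for _, d := range decls {
		out = append(out, d.DeclCode...) // declare symbols (types, function bodies)
	}
	for _, d := range decls {
		out = append(out, d.MethodListCode...) // reflection metadata: method lists
	}
	for _, d := range decls {
		out = append(out, d.TypeInitCode...) // reflection metadata: fields, elements, sizes
	}
	for _, d := range decls {
		out = append(out, d.InitCode...) // package init phase: vars, init(), main() hook
	}
	return out
}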
+func (fc *funcContext) topLevelObjects(srcs *sources.Sources) (vars []*types.Var, functions []*ast.FuncDecl, typeNames typesutil.TypeNames) { + if !fc.isRoot() { + panic(bailout(fmt.Errorf("functionContext.discoverObjects() must be only called on the package-level context"))) + } + + for _, file := range srcs.Files { + for _, decl := range file.Decls { + switch d := decl.(type) { + case *ast.FuncDecl: + sig := fc.pkgCtx.Defs[d.Name].(*types.Func).Type().(*types.Signature) + if sig.Recv() == nil { + fc.objectName(fc.pkgCtx.Defs[d.Name]) // register toplevel name + } + if !isBlank(d.Name) { + functions = append(functions, d) + } + case *ast.GenDecl: + switch d.Tok { + case token.TYPE: + for _, spec := range d.Specs { + o := fc.pkgCtx.Defs[spec.(*ast.TypeSpec).Name].(*types.TypeName) + typeNames.Add(o) + fc.objectName(o) // register toplevel name + } + case token.VAR: + for _, spec := range d.Specs { + for _, name := range spec.(*ast.ValueSpec).Names { + if !isBlank(name) { + o := fc.pkgCtx.Defs[name].(*types.Var) + vars = append(vars, o) + fc.objectName(o) // register toplevel name + } + } + } + case token.CONST: + // skip, constants are inlined + } + } + } + } + + return vars, functions, typeNames +} + +// importDecls processes import declarations. +// +// For each imported package: +// - A new package-level variable is reserved to refer to symbols from that +// package. +// - A Decl instance is generated to be included in the Archive. +// +// Lists of imported package paths and corresponding Decls is returned to the caller. +func (fc *funcContext) importDecls() (importedPaths []string, importDecls []*Decl) { + if !fc.isRoot() { + panic(bailout(fmt.Errorf("functionContext.importDecls() must be only called on the package-level context"))) + } + + imports := []*types.Package{} + for _, pkg := range fc.pkgCtx.Pkg.Imports() { + if pkg == types.Unsafe { + // Prior to Go 1.9, unsafe import was excluded by Imports() method, + // but now we do it here to maintain previous behavior. + continue + } + imports = append(imports, pkg) + } + + // Deterministic processing order. + sort.Slice(imports, func(i, j int) bool { return imports[i].Path() < imports[j].Path() }) + + for _, pkg := range imports { + importedPaths = append(importedPaths, pkg.Path()) + importDecls = append(importDecls, fc.newImportDecl(pkg)) + } + + return importedPaths, importDecls +} + +// newImportDecl registers the imported package and returns a Decl instance for it. +func (fc *funcContext) newImportDecl(importedPkg *types.Package) *Decl { + pkgVar := fc.importedPkgVar(importedPkg) + d := &Decl{ + FullName: importDeclFullName(importedPkg), + Vars: []string{pkgVar}, + DeclCode: []byte(fmt.Sprintf("\t%s = $packages[\"%s\"];\n", pkgVar, importedPkg.Path())), + InitCode: fc.CatchOutput(1, func() { fc.translateStmt(fc.importInitializer(importedPkg.Path()), nil) }), + } + d.Dce().SetAsAlive() + return d +} + +// importInitializer calls the imported package $init() function to ensure it is +// initialized before any code in the importer package runs. +func (fc *funcContext) importInitializer(impPath string) ast.Stmt { + pkgVar := fc.pkgCtx.pkgVars[impPath] + id := fc.newIdent(fmt.Sprintf(`%s.$init`, pkgVar), types.NewSignatureType(nil, nil, nil, nil, nil, false)) + call := &ast.CallExpr{Fun: id} + fc.Blocking[call] = true + fc.Flattened[call] = true + + return &ast.ExprStmt{X: call} +} + +// varDecls translates all package-level variables. +// +// `vars` argument must contain all package-level variables found in the package. 
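For orientation, the strings produced for a single import follow directly from newImportDecl above; in the sketch below the package variable name fmt is an assumption (it is normally chosen by importedPkgVar), and the $init call is what importInitializer's statement translates to, with the blocking-call suspension wiring elided.

// importDeclJS sketches the code sections of the Decl returned by
// newImportDecl for `import "fmt"`; illustrative only.
const importDeclJS = `
	fmt = $packages["fmt"];   /* DeclCode */
	fmt.$init();              /* InitCode: blocking call, suspension wiring elided */
`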
+// The method returns corresponding Decls that declare and initialize the vars
+// as appropriate. Decls are returned in the order necessary to correctly
+// initialize the variables, considering possible dependencies between them.
+func (fc *funcContext) varDecls(vars []*types.Var) []*Decl {
+	if !fc.isRoot() {
+		panic(bailout(fmt.Errorf("functionContext.varDecls() must be only called on the package-level context")))
+	}
+
+	var varDecls []*Decl
+	varsWithInit := fc.pkgCtx.VarsWithInitializers()
+
+	initializers := []*types.Initializer{}
+
+	// For implicitly-initialized vars we generate synthetic zero-value
+	// initializers and then process them the same way as explicitly initialized.
+	for _, o := range vars {
+		if varsWithInit[o] {
+			continue
+		}
+		initializer := &types.Initializer{
+			Lhs: []*types.Var{o},
+			Rhs: fc.zeroValue(o.Type()),
+		}
+		initializers = append(initializers, initializer)
+	}
+
+	// Add explicitly-initialized variables to the list. Implicitly-initialized
+	// variables should be declared first in case explicit initializers depend on
+	// them.
+	initializers = append(initializers, fc.pkgCtx.InitOrder...)
+
+	for _, init := range initializers {
+		varDecls = append(varDecls, fc.newVarDecl(init))
+	}
+
+	return varDecls
+}
+
+// newVarDecl creates a new Decl describing a variable, given an explicit
+// initializer.
+func (fc *funcContext) newVarDecl(init *types.Initializer) *Decl {
+	d := &Decl{
+		FullName: varDeclFullName(init),
+	}
+
+	assignLHS := []ast.Expr{}
+	for _, o := range init.Lhs {
+		assignLHS = append(assignLHS, fc.newIdentFor(o))
+
+		// For non-exported package-level variables we need to declare a local JS
+		// variable. Exported variables are represented as properties of the $pkg
+		// JS object.
+		if !o.Exported() {
+			d.Vars = append(d.Vars, fc.objectName(o))
+		}
+		if fc.pkgCtx.HasPointer[o] && !o.Exported() {
+			d.Vars = append(d.Vars, fc.varPtrName(o))
+		}
+	}
+
+	fc.pkgCtx.CollectDCEDeps(d, func() {
+		fc.localVars = nil
+		d.InitCode = fc.CatchOutput(1, func() {
+			fc.translateStmt(&ast.AssignStmt{
+				Lhs: assignLHS,
+				Tok: token.DEFINE,
+				Rhs: []ast.Expr{init.Rhs},
+			}, nil)
+		})
+
+		// Initializer code may have introduced auxiliary variables (e.g. for
+		// handling multi-assignment or blocking calls), add them to the decl too.
+		d.Vars = append(d.Vars, fc.localVars...)
+		fc.localVars = nil // Clean up after ourselves.
+	})
+
+	d.Dce().SetName(init.Lhs[0])
+	if len(init.Lhs) != 1 || analysis.HasSideEffect(init.Rhs, fc.pkgCtx.Info.Info) {
+		d.Dce().SetAsAlive()
+	}
+	return d
+}
+
+// funcDecls translates all package-level functions and methods.
+//
+// `functions` must contain all package-level function and method declarations
+// found in the AST. The function returns Decls that define corresponding JS
+// functions at runtime. For special functions like init() and main(), decls will
+// also contain code necessary to invoke them.
+func (fc *funcContext) funcDecls(functions []*ast.FuncDecl) ([]*Decl, error) {
+	var funcDecls []*Decl
+	var mainFunc *types.Func
+	for _, fun := range functions {
+		o := fc.pkgCtx.Defs[fun.Name].(*types.Func)
+
+		if fun.Recv == nil {
+			// Auxiliary decl shared by all instances of the function that defines
+			// a package-level variable by which they are all referenced.
+ objName := fc.objectName(o) + varDecl := &Decl{ + FullName: funcVarDeclFullName(o), + Vars: []string{objName}, + } + varDecl.Dce().SetName(o) + if o.Type().(*types.Signature).TypeParams().Len() != 0 { + varDecl.DeclCode = fc.CatchOutput(0, func() { + fc.Printf("%s = {};", objName) + }) + } + funcDecls = append(funcDecls, varDecl) + } + + for _, inst := range fc.knownInstances(o) { + funcDecls = append(funcDecls, fc.newFuncDecl(fun, inst)) + + if o.Name() == "main" { + mainFunc = o // main() function candidate. + } + } + } + if fc.pkgCtx.isMain() { + if mainFunc == nil { + return nil, fmt.Errorf("missing main function") + } + // Add a special Decl for invoking main() function after the program has + // been initialized. It must come after all other functions, especially all + // init() functions, otherwise main() will be invoked too early. + funcDecls = append(funcDecls, &Decl{ + FullName: mainFuncDeclFullName(), + InitCode: fc.CatchOutput(1, func() { fc.translateStmt(fc.callMainFunc(mainFunc), nil) }), + }) + } + return funcDecls, nil +} + +// newFuncDecl returns a Decl that defines a package-level function or a method. +func (fc *funcContext) newFuncDecl(fun *ast.FuncDecl, inst typeparams.Instance) *Decl { + o := fc.pkgCtx.Defs[fun.Name].(*types.Func) + d := &Decl{ + FullName: funcDeclFullName(inst), + Blocking: fc.pkgCtx.IsBlocking(inst), + LinkingName: symbol.New(o), + } + d.Dce().SetName(o, inst.TArgs...) + + if typesutil.IsMethod(o) { + recv := typesutil.RecvType(o.Type().(*types.Signature)).Obj() + d.NamedRecvType = fc.objectName(recv) + } else { + d.RefExpr = fc.instName(inst) + switch o.Name() { + case "main": + if fc.pkgCtx.isMain() { // Found main() function of the program. + d.Dce().SetAsAlive() // Always reachable. + } + case "init": + d.InitCode = fc.CatchOutput(1, func() { fc.translateStmt(fc.callInitFunc(o), nil) }) + d.Dce().SetAsAlive() // init() function is always reachable. + } + } + + fc.pkgCtx.CollectDCEDeps(d, func() { + d.DeclCode = fc.namedFuncContext(inst).translateTopLevelFunction(fun) + }) + return d +} + +// callInitFunc returns an AST statement for calling the given instance of the +// package's init() function. +func (fc *funcContext) callInitFunc(init *types.Func) ast.Stmt { + id := fc.newIdentFor(init) + call := &ast.CallExpr{Fun: id} + if fc.pkgCtx.IsBlocking(typeparams.Instance{Object: init}) { + fc.Blocking[call] = true + } + return &ast.ExprStmt{X: call} +} + +// callMainFunc returns an AST statement for calling the main() function of the +// program, which should be included in the $init() function of the main package. +func (fc *funcContext) callMainFunc(main *types.Func) ast.Stmt { + id := fc.newIdentFor(main) + call := &ast.CallExpr{Fun: id} + ifStmt := &ast.IfStmt{ + Cond: fc.newIdent("$pkg === $mainPkg", types.Typ[types.Bool]), + Body: &ast.BlockStmt{ + List: []ast.Stmt{ + &ast.ExprStmt{X: call}, + &ast.AssignStmt{ + Lhs: []ast.Expr{fc.newIdent("$mainFinished", types.Typ[types.Bool])}, + Tok: token.ASSIGN, + Rhs: []ast.Expr{fc.newConst(types.Typ[types.Bool], constant.MakeBool(true))}, + }, + }, + }, + } + if fc.pkgCtx.IsBlocking(typeparams.Instance{Object: main}) { + fc.Blocking[call] = true + fc.Flattened[ifStmt] = true + } + + return ifStmt +} + +// namedTypeDecls returns Decls that define all names Go types. +// +// `typeNames` must contain all named types defined in the package, including +// those defined inside function bodies. 
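To illustrate callMainFunc, the guard it builds corresponds, roughly, to the JavaScript below once translated; the exact spelling of the main function reference is an assumption, and additional flattening is applied when main() is blocking.

// mainInvocationJS sketches the InitCode emitted for the mainFuncDeclFullName
// decl in the program's main package; illustrative only.
const mainInvocationJS = `
	if ($pkg === $mainPkg) {
		main();
		$mainFinished = true;
	}
`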
+func (fc *funcContext) namedTypeDecls(typeNames typesutil.TypeNames) ([]*Decl, error) { + if !fc.isRoot() { + panic(bailout(fmt.Errorf("functionContext.namedTypeDecls() must be only called on the package-level context"))) + } + + var typeDecls []*Decl + for _, o := range typeNames.Slice() { + if o.IsAlias() { + continue + } + + typeDecls = append(typeDecls, fc.newNamedTypeVarDecl(o)) + + for _, inst := range fc.knownInstances(o) { + d, err := fc.newNamedTypeInstDecl(inst) + if err != nil { + return nil, err + } + typeDecls = append(typeDecls, d) + } + } + + return typeDecls, nil +} + +// newNamedTypeVarDecl returns a Decl that defines a JS variable to store named +// type definition. +// +// For generic types, the variable is an object containing known instantiations +// of the type, keyed by the type argument combination. Otherwise it contains +// the type definition directly. +func (fc *funcContext) newNamedTypeVarDecl(obj *types.TypeName) *Decl { + name := fc.objectName(obj) + varDecl := &Decl{ + FullName: typeVarDeclFullName(obj), + Vars: []string{name}, + } + if fc.pkgCtx.instanceSet.Pkg(obj.Pkg()).ObjHasInstances(obj) { + varDecl.DeclCode = fc.CatchOutput(0, func() { + fc.Printf("%s = {};", name) + }) + } + if isPkgLevel(obj) { + varDecl.TypeInitCode = fc.CatchOutput(0, func() { + fc.Printf("$pkg.%s = %s;", encodeIdent(obj.Name()), name) + }) + } + return varDecl +} + +// newNamedTypeInstDecl returns a Decl that represents an instantiation of a +// named Go type. +func (fc *funcContext) newNamedTypeInstDecl(inst typeparams.Instance) (*Decl, error) { + originType := inst.Object.Type().(*types.Named) + + var nestResolver *typeparams.Resolver + if len(inst.TNest) > 0 { + fn := typeparams.FindNestingFunc(inst.Object) + tp := typeparams.SignatureTypeParams(fn.Type().(*types.Signature)) + nestResolver = typeparams.NewResolver(fc.pkgCtx.typesCtx, tp, inst.TNest, nil) + } + fc.typeResolver = typeparams.NewResolver(fc.pkgCtx.typesCtx, originType.TypeParams(), inst.TArgs, nestResolver) + defer func() { fc.typeResolver = nil }() + + instanceType := originType + if !inst.IsTrivial() { + if len(inst.TArgs) > 0 { + instantiated, err := types.Instantiate(fc.pkgCtx.typesCtx, originType, inst.TArgs, true) + if err != nil { + return nil, fmt.Errorf("failed to instantiate type %v with args %v: %w", originType, inst.TArgs, err) + } + instanceType = instantiated.(*types.Named) + } + if len(inst.TNest) > 0 { + instantiated := nestResolver.Substitute(instanceType) + instanceType = instantiated.(*types.Named) + } + } + + underlying := instanceType.Underlying() + d := &Decl{ + FullName: typeDeclFullName(inst), + } + d.Dce().SetName(inst.Object, inst.TArgs...) + fc.pkgCtx.CollectDCEDeps(d, func() { + // Code that declares a JS type (i.e. prototype) for each Go type. + d.DeclCode = fc.CatchOutput(0, func() { + size := int64(0) + constructor := "null" + + switch t := underlying.(type) { + case *types.Struct: + constructor = fc.structConstructor(t) + case *types.Basic, *types.Array, *types.Slice, *types.Chan, *types.Signature, *types.Interface, *types.Pointer, *types.Map: + size = sizes32.Sizeof(t) + } + if tPointer, ok := underlying.(*types.Pointer); ok { + if _, ok := tPointer.Elem().Underlying().(*types.Array); ok { + // Array pointers have non-default constructors to support wrapping + // of the native objects. 
+ constructor = "$arrayPtrCtor()" + } + } + fc.Printf(`%s = $newType(%d, %s, %q, %t, "%s", %t, %s);`, + fc.instName(inst), size, typeKind(originType), inst.TypeString(), inst.Object.Name() != "", inst.Object.Pkg().Path(), inst.Object.Exported(), constructor) + }) + + // Reflection metadata about methods the type has. + d.MethodListCode = fc.CatchOutput(0, func() { + if _, ok := underlying.(*types.Interface); ok { + return + } + var methods []string + var ptrMethods []string + for i := 0; i < instanceType.NumMethods(); i++ { + entry, isPtr := fc.methodListEntry(instanceType.Method(i)) + if isPtr { + ptrMethods = append(ptrMethods, entry) + } else { + methods = append(methods, entry) + } + } + if len(methods) > 0 { + fc.Printf("%s.methods = [%s];", fc.instName(inst), strings.Join(methods, ", ")) + } + if len(ptrMethods) > 0 { + fc.Printf("%s.methods = [%s];", fc.typeName(types.NewPointer(instanceType)), strings.Join(ptrMethods, ", ")) + } + }) + + // Certain types need to run additional type-specific logic to fully + // initialize themselves. + switch t := underlying.(type) { + case *types.Array, *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Slice, *types.Signature, *types.Struct: + d.TypeInitCode = fc.CatchOutput(0, func() { + fc.Printf("%s.init(%s);", fc.instName(inst), fc.initArgs(t)) + }) + } + }) + return d, nil +} + +// structConstructor returns JS constructor function for a struct type. +func (fc *funcContext) structConstructor(t *types.Struct) string { + constructor := &strings.Builder{} + + ctrArgs := make([]string, t.NumFields()) + for i := 0; i < t.NumFields(); i++ { + ctrArgs[i] = fieldName(t, i) + "_" + } + + fmt.Fprintf(constructor, "function(%s) {\n", strings.Join(ctrArgs, ", ")) + fmt.Fprintf(constructor, "\t\tthis.$val = this;\n") + + // If no arguments were passed, zero-initialize all fields. + fmt.Fprintf(constructor, "\t\tif (arguments.length === 0) {\n") + for i := 0; i < t.NumFields(); i++ { + zeroValue := fc.zeroValue(fc.fieldType(t, i)) + fmt.Fprintf(constructor, "\t\t\tthis.%s = %s;\n", fieldName(t, i), fc.translateExpr(zeroValue).String()) + } + fmt.Fprintf(constructor, "\t\t\treturn;\n") + fmt.Fprintf(constructor, "\t\t}\n") + + // Otherwise initialize fields with the provided values. + for i := 0; i < t.NumFields(); i++ { + fmt.Fprintf(constructor, "\t\tthis.%[1]s = %[1]s_;\n", fieldName(t, i)) + } + fmt.Fprintf(constructor, "\t}") + return constructor.String() +} + +// methodListEntry returns a JS code fragment that describes the given method +// function for runtime reflection. It returns isPtr=true if the method belongs +// to the pointer-receiver method list. +func (fc *funcContext) methodListEntry(method *types.Func) (entry string, isPtr bool) { + name := method.Name() + if reservedKeywords[name] { + name += "$" + } + pkgPath := "" + if !method.Exported() { + pkgPath = method.Pkg().Path() + } + t := method.Type().(*types.Signature) + entry = fmt.Sprintf(`{prop: "%s", name: %s, pkg: "%s", typ: $funcType(%s)}`, + name, encodeString(method.Name()), pkgPath, fc.initArgs(t)) + _, isPtr = t.Recv().Type().(*types.Pointer) + return entry, isPtr +} + +// anonTypeDecls returns a list of Decls corresponding to anonymous Go types +// encountered in the package. +// +// `anonTypes` must contain an ordered list of anonymous types with the +// identifiers that were auto-assigned to them. They must be sorted in the +// topological initialization order (e.g. `[]int` is before `struct{f []int}`). +// +// See also typesutil.AnonymousTypes. 
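As an example of structConstructor's output, a struct with two int fields would get a constructor along the lines of the JavaScript below; the field spellings assume fieldName returns the Go field names unchanged.

// pointCtorJS sketches the constructor generated for
// `type Point struct { X, Y int }`; illustrative only.
const pointCtorJS = `function(X_, Y_) {
		this.$val = this;
		if (arguments.length === 0) {
			this.X = 0;
			this.Y = 0;
			return;
		}
		this.X = X_;
		this.Y = Y_;
	}`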
+func (fc *funcContext) anonTypeDecls(anonTypes []*types.TypeName) []*Decl { + if !fc.isRoot() { + panic(bailout(fmt.Errorf("functionContext.anonTypeDecls() must be only called on the package-level context"))) + } + decls := []*Decl{} + for _, t := range anonTypes { + d := &Decl{ + FullName: anonTypeDeclFullName(t), + Vars: []string{t.Name()}, + } + d.Dce().SetName(t) + fc.pkgCtx.CollectDCEDeps(d, func() { + d.DeclCode = []byte(fmt.Sprintf("\t%s = $%sType(%s);\n", t.Name(), strings.ToLower(typeKind(t.Type())[5:]), fc.initArgs(t.Type()))) + }) + decls = append(decls, d) + } + return decls +} diff --git a/compiler/expressions.go b/compiler/expressions.go index 21971ab5f..781a37a3e 100644 --- a/compiler/expressions.go +++ b/compiler/expressions.go @@ -11,8 +11,9 @@ import ( "strconv" "strings" - "github.com/gopherjs/gopherjs/compiler/analysis" "github.com/gopherjs/gopherjs/compiler/astutil" + "github.com/gopherjs/gopherjs/compiler/internal/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" "github.com/gopherjs/gopherjs/compiler/typesutil" ) @@ -33,7 +34,7 @@ func (e *expression) StringWithParens() string { } func (fc *funcContext) translateExpr(expr ast.Expr) *expression { - exprType := fc.pkgCtx.TypeOf(expr) + exprType := fc.typeOf(expr) if value := fc.pkgCtx.Types[expr].Value; value != nil { basic := exprType.Underlying().(*types.Basic) switch { @@ -76,19 +77,16 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } } - var obj types.Object + var inst typeparams.Instance switch e := expr.(type) { case *ast.SelectorExpr: - obj = fc.pkgCtx.Uses[e.Sel] + inst = fc.instanceOf(e.Sel) case *ast.Ident: - obj = fc.pkgCtx.Defs[e] - if obj == nil { - obj = fc.pkgCtx.Uses[e] - } + inst = fc.instanceOf(e) } - if obj != nil && typesutil.IsJsPackage(obj.Pkg()) { - switch obj.Name() { + if inst.Object != nil && typesutil.IsJsPackage(inst.Object.Pkg()) { + switch inst.Object.Name() { case "Global": return fc.formatExpr("$global") case "Module": @@ -180,18 +178,18 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } if !isKeyValue { for i, element := range e.Elts { - elements[i] = fc.translateImplicitConversionWithCloning(element, t.Field(i).Type()).String() + elements[i] = fc.translateImplicitConversionWithCloning(element, fc.fieldType(t, i)).String() } } if isKeyValue { for i := range elements { - elements[i] = fc.translateExpr(fc.zeroValue(t.Field(i).Type())).String() + elements[i] = fc.translateExpr(fc.zeroValue(fc.fieldType(t, i))).String() } for _, element := range e.Elts { kve := element.(*ast.KeyValueExpr) for j := range elements { if kve.Key.(*ast.Ident).Name == t.Field(j).Name() { - elements[j] = fc.translateImplicitConversionWithCloning(kve.Value, t.Field(j).Type()).String() + elements[j] = fc.translateImplicitConversionWithCloning(kve.Value, fc.fieldType(t, j)).String() break } } @@ -203,11 +201,16 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } case *ast.FuncLit: - _, fun := translateFunction(e.Type, nil, e.Body, fc, exprType.(*types.Signature), fc.pkgCtx.FuncLitInfos[e], "") + fun := fc.literalFuncContext(e).translateFunctionBody(e.Type, nil, e.Body) if len(fc.pkgCtx.escapingVars) != 0 { names := make([]string, 0, len(fc.pkgCtx.escapingVars)) for obj := range fc.pkgCtx.escapingVars { - names = append(names, fc.pkgCtx.objectNames[obj]) + name, ok := fc.assignedObjectName(obj) + if !ok { + // This should never happen. 
+ panic(fmt.Errorf("escaping variable %s hasn't been assigned a JS name", obj)) + } + names = append(names, name) } sort.Strings(names) list := strings.Join(names, ", ") @@ -216,7 +219,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { return fc.formatExpr("(%s)", fun) case *ast.UnaryExpr: - t := fc.pkgCtx.TypeOf(e.X) + t := fc.typeOf(e.X) switch e.Op { case token.AND: if typesutil.IsJsObject(exprType) { @@ -236,26 +239,31 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { switch x := astutil.RemoveParens(e.X).(type) { case *ast.CompositeLit: - return fc.formatExpr("$newDataPointer(%e, %s)", x, fc.typeName(fc.pkgCtx.TypeOf(e))) + return fc.formatExpr("$newDataPointer(%e, %s)", x, fc.typeName(fc.typeOf(e))) case *ast.Ident: obj := fc.pkgCtx.Uses[x].(*types.Var) if fc.pkgCtx.escapingVars[obj] { - return fc.formatExpr("(%1s.$ptr || (%1s.$ptr = new %2s(function() { return this.$target[0]; }, function($v) { this.$target[0] = $v; }, %1s)))", fc.pkgCtx.objectNames[obj], fc.typeName(exprType)) + name, ok := fc.assignedObjectName(obj) + if !ok { + // This should never happen. + panic(fmt.Errorf("escaping variable %s hasn't been assigned a JS name", obj)) + } + return fc.formatExpr("(%1s.$ptr || (%1s.$ptr = new %2s(function() { return this.$target[0]; }, function($v) { this.$target[0] = $v; }, %1s)))", name, fc.typeName(exprType)) } return fc.formatExpr(`(%1s || (%1s = new %2s(function() { return %3s; }, function($v) { %4s })))`, fc.varPtrName(obj), fc.typeName(exprType), fc.objectName(obj), fc.translateAssign(x, fc.newIdent("$v", elemType), false)) case *ast.SelectorExpr: - sel, ok := fc.pkgCtx.SelectionOf(x) + sel, ok := fc.selectionOf(x) if !ok { // qualified identifier obj := fc.pkgCtx.Uses[x.Sel].(*types.Var) return fc.formatExpr(`(%1s || (%1s = new %2s(function() { return %3s; }, function($v) { %4s })))`, fc.varPtrName(obj), fc.typeName(exprType), fc.objectName(obj), fc.translateAssign(x, fc.newIdent("$v", elemType), false)) } - newSel := &ast.SelectorExpr{X: fc.newIdent("this.$target", fc.pkgCtx.TypeOf(x.X)), Sel: x.Sel} + newSel := &ast.SelectorExpr{X: fc.newIdent("this.$target", fc.typeOf(x.X)), Sel: x.Sel} fc.setType(newSel, exprType) fc.pkgCtx.additionalSelections[newSel] = sel return fc.formatExpr("(%1e.$ptr_%2s || (%1e.$ptr_%2s = new %3s(function() { return %4e; }, function($v) { %5s }, %1e)))", x.X, x.Sel.Name, fc.typeName(exprType), newSel, fc.translateAssign(newSel, fc.newIdent("$v", exprType), false)) case *ast.IndexExpr: - if _, ok := fc.pkgCtx.TypeOf(x.X).Underlying().(*types.Slice); ok { + if _, ok := fc.typeOf(x.X).Underlying().(*types.Slice); ok { return fc.formatExpr("$indexPtr(%1e.$array, %1e.$offset + %2e, %3s)", x.X, x.Index, fc.typeName(exprType)) } return fc.formatExpr("$indexPtr(%e, %e, %s)", x.X, x.Index, fc.typeName(exprType)) @@ -312,8 +320,8 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { })) } - t := fc.pkgCtx.TypeOf(e.X) - t2 := fc.pkgCtx.TypeOf(e.Y) + t := fc.typeOf(e.X) + t2 := fc.typeOf(e.Y) _, isInterface := t2.Underlying().(*types.Interface) if isInterface || types.Identical(t, types.Typ[types.UntypedNil]) { t = t2 @@ -379,7 +387,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { switch basic.Kind() { case types.Int32, types.Int: return fc.formatParenExpr("$imul(%e, %e)", e.X, e.Y) - case types.Uint32, types.Uintptr: + case types.Uint32, types.Uint, types.Uintptr: return fc.formatParenExpr("$imul(%e, %e) >>> 0", e.X, e.Y) } return fc.fixNumber(fc.formatExpr("%e * %e", e.X, e.Y), 
basic) @@ -390,14 +398,14 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { if isUnsigned(basic) { shift = ">>>" } - return fc.formatExpr(`(%1s = %2e / %3e, (%1s === %1s && %1s !== 1/0 && %1s !== -1/0) ? %1s %4s 0 : $throwRuntimeError("integer divide by zero"))`, fc.newVariable("_q"), e.X, e.Y, shift) + return fc.formatExpr(`(%1s = %2e / %3e, (%1s === %1s && %1s !== 1/0 && %1s !== -1/0) ? %1s %4s 0 : $throwRuntimeError("integer divide by zero"))`, fc.newLocalVariable("_q"), e.X, e.Y, shift) } if basic.Kind() == types.Float32 { return fc.fixNumber(fc.formatExpr("%e / %e", e.X, e.Y), basic) } return fc.formatExpr("%e / %e", e.X, e.Y) case token.REM: - return fc.formatExpr(`(%1s = %2e %% %3e, %1s === %1s ? %1s : $throwRuntimeError("integer divide by zero"))`, fc.newVariable("_r"), e.X, e.Y) + return fc.formatExpr(`(%1s = %2e %% %3e, %1s === %1s ? %1s : $throwRuntimeError("integer divide by zero"))`, fc.newLocalVariable("_r"), e.X, e.Y) case token.SHL, token.SHR: op := e.Op.String() if e.Op == token.SHR && isUnsigned(basic) { @@ -413,7 +421,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { if e.Op == token.SHR && !isUnsigned(basic) { return fc.fixNumber(fc.formatParenExpr("%e >> $min(%f, 31)", e.X, e.Y), basic) } - y := fc.newVariable("y") + y := fc.newLocalVariable("y") return fc.fixNumber(fc.formatExpr("(%s = %f, %s < 32 ? (%e %s %s) : 0)", y, e.Y, y, e.X, op, y), basic) case token.AND, token.OR: if isUnsigned(basic) { @@ -436,7 +444,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { if fc.Blocking[e.Y] { skipCase := fc.caseCounter fc.caseCounter++ - resultVar := fc.newVariable("_v") + resultVar := fc.newLocalVariable("_v") fc.Printf("if (!(%s)) { %s = false; $s = %d; continue s; }", fc.translateExpr(e.X), resultVar, skipCase) fc.Printf("%s = %s; case %d:", resultVar, fc.translateExpr(e.Y), skipCase) return fc.formatExpr("%s", resultVar) @@ -446,7 +454,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { if fc.Blocking[e.Y] { skipCase := fc.caseCounter fc.caseCounter++ - resultVar := fc.newVariable("_v") + resultVar := fc.newLocalVariable("_v") fc.Printf("if (%s) { %s = true; $s = %d; continue s; }", fc.translateExpr(e.X), resultVar, skipCase) fc.Printf("%s = %s; case %d:", resultVar, fc.translateExpr(e.Y), skipCase) return fc.formatExpr("%s", resultVar) @@ -477,7 +485,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { return fc.formatParenExpr("%e", e.X) case *ast.IndexExpr: - switch t := fc.pkgCtx.TypeOf(e.X).Underlying().(type) { + switch t := fc.typeOf(e.X).Underlying().(type) { case *types.Pointer: if _, ok := t.Elem().Underlying().(*types.Array); !ok { // Should never happen in type-checked code. @@ -498,14 +506,14 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { case *types.Slice: return fc.formatExpr(rangeCheck("%1e.$array[%1e.$offset + %2f]", fc.pkgCtx.Types[e.Index].Value != nil, false), e.X, e.Index) case *types.Map: - if typesutil.IsJsObject(fc.pkgCtx.TypeOf(e.Index)) { + if typesutil.IsJsObject(fc.typeOf(e.Index)) { fc.pkgCtx.errList = append(fc.pkgCtx.errList, types.Error{Fset: fc.pkgCtx.fileSet, Pos: e.Index.Pos(), Msg: "cannot use js.Object as map key"}) } key := fmt.Sprintf("%s.keyFor(%s)", fc.typeName(t.Key()), fc.translateImplicitConversion(e.Index, t.Key())) if _, isTuple := exprType.(*types.Tuple); isTuple { return fc.formatExpr( `(%1s = $mapIndex(%2e,%3s), %1s !== undefined ? 
[%1s.v, true] : [%4e, false])`, - fc.newVariable("_entry"), + fc.newLocalVariable("_entry"), e.X, key, fc.zeroValue(t.Elem()), @@ -513,7 +521,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } return fc.formatExpr( `(%1s = $mapIndex(%2e,%3s), %1s !== undefined ? %1s.v : %4e)`, - fc.newVariable("_entry"), + fc.newLocalVariable("_entry"), e.X, key, fc.zeroValue(t.Elem()), @@ -521,14 +529,19 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { case *types.Basic: return fc.formatExpr("%e.charCodeAt(%f)", e.X, e.Index) case *types.Signature: - err := bailout(fmt.Errorf(`unsupported type parameters used at %s`, fc.pkgCtx.fileSet.Position(e.Pos()))) - panic(err) + return fc.formatExpr("%s", fc.instName(fc.instanceOf(e.X.(*ast.Ident)))) default: panic(fmt.Errorf(`unhandled IndexExpr: %T`, t)) } - + case *ast.IndexListExpr: + switch t := fc.typeOf(e.X).Underlying().(type) { + case *types.Signature: + return fc.formatExpr("%s", fc.instName(fc.instanceOf(e.X.(*ast.Ident)))) + default: + panic(fmt.Errorf("unhandled IndexListExpr: %T", t)) + } case *ast.SliceExpr: - if b, isBasic := fc.pkgCtx.TypeOf(e.X).Underlying().(*types.Basic); isBasic && isString(b) { + if b, isBasic := fc.typeOf(e.X).Underlying().(*types.Basic); isBasic && isString(b) { switch { case e.Low == nil && e.High == nil: return fc.translateExpr(e.X) @@ -559,10 +572,10 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } case *ast.SelectorExpr: - sel, ok := fc.pkgCtx.SelectionOf(e) + sel, ok := fc.selectionOf(e) if !ok { // qualified identifier - return fc.formatExpr("%s", fc.objectName(obj)) + return fc.formatExpr("%s", fc.instName(inst)) } switch sel.Kind() { @@ -578,9 +591,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { case types.MethodVal: return fc.formatExpr(`$methodVal(%s, "%s")`, fc.makeReceiver(e), sel.Obj().(*types.Func).Name()) case types.MethodExpr: - if !sel.Obj().Exported() { - fc.pkgCtx.dependencies[sel.Obj()] = true - } + fc.pkgCtx.DeclareDCEDep(sel.Obj(), inst.TArgs...) 
if _, ok := sel.Recv().Underlying().(*types.Interface); ok { return fc.formatExpr(`$ifaceMethodExpr("%s")`, sel.Obj().(*types.Func).Name()) } @@ -593,10 +604,10 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { plainFun := astutil.RemoveParens(e.Fun) if astutil.IsTypeExpr(plainFun, fc.pkgCtx.Info.Info) { - return fc.formatExpr("(%s)", fc.translateConversion(e.Args[0], fc.pkgCtx.TypeOf(plainFun))) + return fc.formatExpr("(%s)", fc.translateConversion(e.Args[0], fc.typeOf(plainFun))) } - sig := fc.pkgCtx.TypeOf(plainFun).Underlying().(*types.Signature) + sig := fc.typeOf(plainFun).Underlying().(*types.Signature) switch f := plainFun.(type) { case *ast.Ident: @@ -610,10 +621,13 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { return fc.translateCall(e, sig, fc.translateExpr(f)) case *ast.SelectorExpr: - sel, ok := fc.pkgCtx.SelectionOf(f) + sel, ok := fc.selectionOf(f) if !ok { // qualified identifier obj := fc.pkgCtx.Uses[f.Sel] + if o, ok := obj.(*types.Builtin); ok { + return fc.translateBuiltin(o.Name(), sig, e.Args, e.Ellipsis.IsValid()) + } if typesutil.IsJsPackage(obj.Pkg()) { switch obj.Name() { case "Debugger": @@ -626,7 +640,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } externalizeExpr := func(e ast.Expr) string { - t := fc.pkgCtx.TypeOf(e) + t := fc.typeOf(e) if types.Identical(t, types.Typ[types.UntypedNil]) { return "null" } @@ -673,13 +687,13 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { case "Call": if id, ok := fc.identifierConstant(e.Args[0]); ok { if e.Ellipsis.IsValid() { - objVar := fc.newVariable("obj") + objVar := fc.newLocalVariable("obj") return fc.formatExpr("(%s = %s, %s.%s.apply(%s, %s))", objVar, recv, objVar, id, objVar, externalizeExpr(e.Args[1])) } return fc.formatExpr("%s(%s)", globalRef(id), externalizeArgs(e.Args[1:])) } if e.Ellipsis.IsValid() { - objVar := fc.newVariable("obj") + objVar := fc.newLocalVariable("obj") return fc.formatExpr("(%s = %s, %s[$externalize(%e, $String)].apply(%s, %s))", objVar, recv, objVar, e.Args[0], objVar, externalizeExpr(e.Args[1])) } return fc.formatExpr("%s[$externalize(%e, $String)](%s)", recv, e.Args[0], externalizeArgs(e.Args[1:])) @@ -714,10 +728,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } } - methodName := sel.Obj().Name() - if reservedKeywords[methodName] { - methodName += "$" - } + methodName := fc.methodName(sel.Obj().(*types.Func)) return fc.translateCall(e, sig, fc.formatExpr("%s.%s", recv, methodName)) case types.FieldVal: @@ -746,11 +757,11 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { } case *ast.StarExpr: - if typesutil.IsJsObject(fc.pkgCtx.TypeOf(e.X)) { + if typesutil.IsJsObject(fc.typeOf(e.X)) { return fc.formatExpr("new $jsObjectPtr(%e)", e.X) } if c1, isCall := e.X.(*ast.CallExpr); isCall && len(c1.Args) == 1 { - if c2, isCall := c1.Args[0].(*ast.CallExpr); isCall && len(c2.Args) == 1 && types.Identical(fc.pkgCtx.TypeOf(c2.Fun), types.Typ[types.UnsafePointer]) { + if c2, isCall := c1.Args[0].(*ast.CallExpr); isCall && len(c2.Args) == 1 && types.Identical(fc.typeOf(c2.Fun), types.Typ[types.UnsafePointer]) { if unary, isUnary := c2.Args[0].(*ast.UnaryExpr); isUnary && unary.Op == token.AND { return fc.translateExpr(unary.X) // unsafe conversion } @@ -766,7 +777,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { if e.Type == nil { return fc.translateExpr(e.X) } - t := fc.pkgCtx.TypeOf(e.Type) + t := fc.typeOf(e.Type) if _, isTuple := 
exprType.(*types.Tuple); isTuple { return fc.formatExpr("$assertType(%e, %s, true)", e.X, fc.typeName(t)) } @@ -776,11 +787,11 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { if e.Name == "_" { panic("Tried to translate underscore identifier.") } - switch o := obj.(type) { + switch o := inst.Object.(type) { case *types.Var, *types.Const: - return fc.formatExpr("%s", fc.objectName(o)) + return fc.formatExpr("%s", fc.instName(inst)) case *types.Func: - return fc.formatExpr("%s", fc.objectName(o)) + return fc.formatExpr("%s", fc.instName(inst)) case *types.TypeName: return fc.formatExpr("%s", fc.typeName(o.Type())) case *types.Nil: @@ -790,7 +801,7 @@ func (fc *funcContext) translateExpr(expr ast.Expr) *expression { switch t := exprType.Underlying().(type) { case *types.Basic: if t.Kind() != types.UnsafePointer { - panic("unexpected basic type") + panic(fmt.Errorf(`unexpected basic type: %v in %v`, t, e.Name)) } return fc.formatExpr("0") case *types.Slice, *types.Pointer: @@ -826,7 +837,7 @@ func (fc *funcContext) translateCall(e *ast.CallExpr, sig *types.Signature, fun fc.caseCounter++ returnVar := "$r" if sig.Results().Len() != 0 { - returnVar = fc.newVariable("_r") + returnVar = fc.newLocalVariable("_r") } fc.Printf("%[1]s = %[2]s(%[3]s); /* */ $s = %[4]d; case %[4]d: if($c) { $c = false; %[1]s = %[1]s.$blk(); } if (%[1]s && %[1]s.$blk !== undefined) { break s; }", returnVar, fun, strings.Join(args, ", "), resumeCase) if sig.Results().Len() != 0 { @@ -841,7 +852,7 @@ func (fc *funcContext) translateCall(e *ast.CallExpr, sig *types.Signature, fun // and its arguments to be invoked elsewhere. // // This function is necessary in conjunction with keywords such as `go` and `defer`, -// where we need to compute function and its arguments at the the keyword site, +// where we need to compute function and its arguments at the keyword site, // but the call itself will happen elsewhere (hence "delegated"). // // Built-in functions and cetrain `js.Object` methods don't translate into JS @@ -857,9 +868,8 @@ func (fc *funcContext) delegatedCall(expr *ast.CallExpr) (callable *expression, case *ast.SelectorExpr: isJs = typesutil.IsJsPackage(fc.pkgCtx.Uses[fun.Sel].Pkg()) } - sig := fc.pkgCtx.TypeOf(expr.Fun).Underlying().(*types.Signature) - sigTypes := signatureTypes{Sig: sig} - args := fc.translateArgs(sig, expr.Args, expr.Ellipsis.IsValid()) + sig := typesutil.Signature{Sig: fc.typeOf(expr.Fun).Underlying().(*types.Signature)} + args := fc.translateArgs(sig.Sig, expr.Args, expr.Ellipsis.IsValid()) if !isBuiltin && !isJs { // Normal function calls don't require wrappers. @@ -876,12 +886,12 @@ func (fc *funcContext) delegatedCall(expr *ast.CallExpr) (callable *expression, ellipsis := expr.Ellipsis for i := range expr.Args { - v := fc.newVariable("_arg") + v := fc.newLocalVariable("_arg") vars[i] = v // Subtle: the proxy lambda argument needs to be assigned with the type // that the original function expects, and not with the argument // expression result type, or we may do implicit type conversion twice. 
- callArgs[i] = fc.newIdent(v, sigTypes.Param(i, ellipsis.IsValid())) + callArgs[i] = fc.newIdent(v, sig.Param(i, ellipsis.IsValid())) } wrapper := &ast.CallExpr{ Fun: expr.Fun, @@ -894,9 +904,9 @@ func (fc *funcContext) delegatedCall(expr *ast.CallExpr) (callable *expression, } func (fc *funcContext) makeReceiver(e *ast.SelectorExpr) *expression { - sel, _ := fc.pkgCtx.SelectionOf(e) + sel, _ := fc.selectionOf(e) if !sel.Obj().Exported() { - fc.pkgCtx.dependencies[sel.Obj()] = true + fc.pkgCtx.DeclareDCEDep(sel.Obj()) } x := e.X @@ -907,16 +917,11 @@ func (fc *funcContext) makeReceiver(e *ast.SelectorExpr) *expression { recvType = ptr.Elem() } s := recvType.Underlying().(*types.Struct) - recvType = s.Field(index).Type() + recvType = fc.fieldType(s, index) } fakeSel := &ast.SelectorExpr{X: x, Sel: ast.NewIdent("o")} - fc.pkgCtx.additionalSelections[fakeSel] = &fakeSelection{ - kind: types.FieldVal, - recv: sel.Recv(), - index: sel.Index()[:len(sel.Index())-1], - typ: recvType, - } + fc.pkgCtx.additionalSelections[fakeSel] = typesutil.NewSelection(types.FieldVal, sel.Recv(), sel.Index()[:len(sel.Index())-1], nil, recvType) x = fc.setType(fakeSel, recvType) } @@ -953,9 +958,9 @@ func (fc *funcContext) translateBuiltin(name string, sig *types.Signature, args return fc.formatExpr("$newDataPointer(%e, %s)", fc.zeroValue(t.Elem()), fc.typeName(t)) } case "make": - switch argType := fc.pkgCtx.TypeOf(args[0]).Underlying().(type) { + switch argType := fc.typeOf(args[0]).Underlying().(type) { case *types.Slice: - t := fc.typeName(fc.pkgCtx.TypeOf(args[0])) + t := fc.typeName(fc.typeOf(args[0])) if len(args) == 3 { return fc.formatExpr("$makeSlice(%s, %f, %f)", t, args[1], args[2]) } @@ -970,13 +975,17 @@ func (fc *funcContext) translateBuiltin(name string, sig *types.Signature, args if len(args) == 2 { length = fc.formatExpr("%f", args[1]).String() } - return fc.formatExpr("new $Chan(%s, %s)", fc.typeName(fc.pkgCtx.TypeOf(args[0]).Underlying().(*types.Chan).Elem()), length) + return fc.formatExpr("new $Chan(%s, %s)", fc.typeName(fc.typeOf(args[0]).Underlying().(*types.Chan).Elem()), length) default: panic(fmt.Sprintf("Unhandled make type: %T\n", argType)) } case "len": - switch argType := fc.pkgCtx.TypeOf(args[0]).Underlying().(type) { + switch argType := fc.typeOf(args[0]).Underlying().(type) { case *types.Basic: + // If the argument is a concatenation of strings, then add parentheses. 
+ if _, ok := args[0].(*ast.BinaryExpr); ok { + return fc.formatExpr("(%e).length", args[0]) + } return fc.formatExpr("%e.length", args[0]) case *types.Slice: return fc.formatExpr("%e.$length", args[0]) @@ -991,7 +1000,7 @@ func (fc *funcContext) translateBuiltin(name string, sig *types.Signature, args panic(fmt.Sprintf("Unhandled len type: %T\n", argType)) } case "cap": - switch argType := fc.pkgCtx.TypeOf(args[0]).Underlying().(type) { + switch argType := fc.typeOf(args[0]).Underlying().(type) { case *types.Slice, *types.Chan: return fc.formatExpr("%e.$capacity", args[0]) case *types.Pointer: @@ -1011,7 +1020,7 @@ func (fc *funcContext) translateBuiltin(name string, sig *types.Signature, args return fc.formatExpr("$append(%e, %s)", args[0], strings.Join(fc.translateExprSlice(args[1:], sliceType.Elem()), ", ")) case "delete": args = fc.expandTupleArgs(args) - keyType := fc.pkgCtx.TypeOf(args[0]).Underlying().(*types.Map).Key() + keyType := fc.typeOf(args[0]).Underlying().(*types.Map).Key() return fc.formatExpr( `$mapDelete(%1e, %2s.keyFor(%3s))`, args[0], @@ -1020,7 +1029,7 @@ func (fc *funcContext) translateBuiltin(name string, sig *types.Signature, args ) case "copy": args = fc.expandTupleArgs(args) - if basic, isBasic := fc.pkgCtx.TypeOf(args[1]).Underlying().(*types.Basic); isBasic && isString(basic) { + if basic, isBasic := fc.typeOf(args[1]).Underlying().(*types.Basic); isBasic && isString(basic) { return fc.formatExpr("$copyString(%e, %e)", args[0], args[1]) } return fc.formatExpr("$copySlice(%e, %e)", args[0], args[1]) @@ -1041,6 +1050,13 @@ func (fc *funcContext) translateBuiltin(name string, sig *types.Signature, args return fc.formatExpr("$recover()") case "close": return fc.formatExpr(`$close(%e)`, args[0]) + case "Sizeof": + return fc.formatExpr("%d", sizes32.Sizeof(fc.typeOf(args[0]))) + case "Alignof": + return fc.formatExpr("%d", sizes32.Alignof(fc.typeOf(args[0]))) + case "Offsetof": + sel, _ := fc.selectionOf(astutil.RemoveParens(args[0]).(*ast.SelectorExpr)) + return fc.formatExpr("%d", typesutil.OffsetOf(sizes32, sel)) default: panic(fmt.Sprintf("Unhandled builtin: %s\n", name)) } @@ -1072,13 +1088,13 @@ func (fc *funcContext) translateExprSlice(exprs []ast.Expr, desiredType types.Ty } func (fc *funcContext) translateConversion(expr ast.Expr, desiredType types.Type) *expression { - exprType := fc.pkgCtx.TypeOf(expr) + exprType := fc.typeOf(expr) if types.Identical(exprType, desiredType) { return fc.translateExpr(expr) } if fc.pkgCtx.Pkg.Path() == "reflect" || fc.pkgCtx.Pkg.Path() == "internal/reflectlite" { - if call, isCall := expr.(*ast.CallExpr); isCall && types.Identical(fc.pkgCtx.TypeOf(call.Fun), types.Typ[types.UnsafePointer]) { + if call, isCall := expr.(*ast.CallExpr); isCall && types.Identical(fc.typeOf(call.Fun), types.Typ[types.UnsafePointer]) { if ptr, isPtr := desiredType.(*types.Pointer); isPtr { if named, isNamed := ptr.Elem().(*types.Named); isNamed { switch named.Obj().Name() { @@ -1111,8 +1127,6 @@ func (fc *funcContext) translateConversion(expr ast.Expr, desiredType types.Type return fc.fixNumber(fc.formatParenExpr("%1l + ((%1h >> 31) * 4294967296)", expr), t) } return fc.fixNumber(fc.formatExpr("%s.$low", fc.translateExpr(expr)), t) - case isFloat(basicExprType): - return fc.formatParenExpr("%e >> 0", expr) case types.Identical(exprType, types.Typ[types.UnsafePointer]): return fc.translateExpr(expr) default: @@ -1153,10 +1167,10 @@ func (fc *funcContext) translateConversion(expr ast.Expr, desiredType types.Type return fc.formatExpr("new 
Uint8Array(0)") } } - if ptr, isPtr := fc.pkgCtx.TypeOf(expr).(*types.Pointer); fc.pkgCtx.Pkg.Path() == "syscall" && isPtr { + if ptr, isPtr := fc.typeOf(expr).(*types.Pointer); fc.pkgCtx.Pkg.Path() == "syscall" && isPtr { if s, isStruct := ptr.Elem().Underlying().(*types.Struct); isStruct { - array := fc.newVariable("_array") - target := fc.newVariable("_struct") + array := fc.newLocalVariable("_array") + target := fc.newLocalVariable("_struct") fc.Printf("%s = new Uint8Array(%d);", array, sizes32.Sizeof(s)) fc.Delayed(func() { fc.Printf("%s = %s, %s;", target, fc.translateExpr(expr), fc.loadStruct(array, target, s)) @@ -1166,7 +1180,7 @@ func (fc *funcContext) translateConversion(expr ast.Expr, desiredType types.Type } if call, ok := expr.(*ast.CallExpr); ok { if id, ok := call.Fun.(*ast.Ident); ok && id.Name == "new" { - return fc.formatExpr("new Uint8Array(%d)", int(sizes32.Sizeof(fc.pkgCtx.TypeOf(call.Args[0])))) + return fc.formatExpr("new Uint8Array(%d)", int(sizes32.Sizeof(fc.typeOf(call.Args[0])))) } } } @@ -1204,8 +1218,8 @@ func (fc *funcContext) translateConversion(expr ast.Expr, desiredType types.Type // struct pointer when handling syscalls. // TODO(nevkontakte): Add a runtime assertion that the unsafe.Pointer is // indeed pointing at a byte array. - array := fc.newVariable("_array") - target := fc.newVariable("_struct") + array := fc.newLocalVariable("_array") + target := fc.newLocalVariable("_struct") return fc.formatExpr("(%s = %e, %s = %e, %s, %s)", array, expr, target, fc.zeroValue(t.Elem()), fc.loadStruct(array, target, ptrElType), target) } // Convert between structs of different types but identical layouts, @@ -1227,7 +1241,7 @@ func (fc *funcContext) translateConversion(expr ast.Expr, desiredType types.Type // type iPtr *int; var c int = 42; println((iPtr)(&c)); // TODO(nevkontakte): Are there any other cases that fall into this case? 
exprTypeElem := exprType.Underlying().(*types.Pointer).Elem() - ptrVar := fc.newVariable("_ptr") + ptrVar := fc.newLocalVariable("_ptr") getterConv := fc.translateConversion(fc.setType(&ast.StarExpr{X: fc.newIdent(ptrVar, exprType)}, exprTypeElem), t.Elem()) setterConv := fc.translateConversion(fc.newIdent("$v", t.Elem()), exprTypeElem) return fc.formatExpr("(%1s = %2e, new %3s(function() { return %4s; }, function($v) { %1s.$set(%5s); }, %1s.$target))", ptrVar, expr, fc.typeName(desiredType), getterConv, setterConv) @@ -1255,7 +1269,7 @@ func (fc *funcContext) translateImplicitConversion(expr ast.Expr, desiredType ty return fc.translateExpr(expr) } - exprType := fc.pkgCtx.TypeOf(expr) + exprType := fc.typeOf(expr) if types.Identical(exprType, desiredType) { return fc.translateExpr(expr) } @@ -1286,7 +1300,7 @@ func (fc *funcContext) translateImplicitConversion(expr ast.Expr, desiredType ty } func (fc *funcContext) translateConversionToSlice(expr ast.Expr, desiredType types.Type) *expression { - switch fc.pkgCtx.TypeOf(expr).Underlying().(type) { + switch fc.typeOf(expr).Underlying().(type) { case *types.Array, *types.Pointer: return fc.formatExpr("new %s(%e)", fc.typeName(desiredType), expr) } @@ -1294,18 +1308,19 @@ func (fc *funcContext) translateConversionToSlice(expr ast.Expr, desiredType typ } func (fc *funcContext) loadStruct(array, target string, s *types.Struct) string { - view := fc.newVariable("_view") + view := fc.newLocalVariable("_view") code := fmt.Sprintf("%s = new DataView(%s.buffer, %s.byteOffset)", view, array, array) var fields []*types.Var var collectFields func(s *types.Struct, path string) collectFields = func(s *types.Struct, path string) { for i := 0; i < s.NumFields(); i++ { - field := s.Field(i) - if fs, isStruct := field.Type().Underlying().(*types.Struct); isStruct { - collectFields(fs, path+"."+fieldName(s, i)) + fieldName := path + "." 
+ fieldName(s, i) + fieldType := fc.fieldType(s, i) + if fs, isStruct := fieldType.Underlying().(*types.Struct); isStruct { + collectFields(fs, fieldName) continue } - fields = append(fields, types.NewVar(0, nil, path+"."+fieldName(s, i), field.Type())) + fields = append(fields, types.NewVar(0, nil, fieldName, fieldType)) } } collectFields(s, target) @@ -1424,7 +1439,7 @@ func (fc *funcContext) formatExprInternal(format string, a []interface{}, parens out.WriteByte('(') parens = false } - v := fc.newVariable("x") + v := fc.newLocalVariable("x") out.WriteString(v + " = " + fc.translateExpr(e.(ast.Expr)).String() + ", ") vars[i] = v } @@ -1447,7 +1462,7 @@ func (fc *funcContext) formatExprInternal(format string, a []interface{}, parens } out.WriteString(a[n].(string)) case 'd': - out.WriteString(strconv.Itoa(a[n].(int))) + fmt.Fprintf(out, "%d", a[n]) case 't': out.WriteString(a[n].(token.Token).String()) case 'e': @@ -1464,7 +1479,7 @@ func (fc *funcContext) formatExprInternal(format string, a []interface{}, parens out.WriteString(strconv.FormatInt(d, 10)) return } - if is64Bit(fc.pkgCtx.TypeOf(e).Underlying().(*types.Basic)) { + if is64Bit(fc.typeOf(e).Underlying().(*types.Basic)) { out.WriteString("$flatten64(") writeExpr("") out.WriteString(")") @@ -1475,7 +1490,7 @@ func (fc *funcContext) formatExprInternal(format string, a []interface{}, parens e := a[n].(ast.Expr) if val := fc.pkgCtx.Types[e].Value; val != nil { d, _ := constant.Uint64Val(constant.ToInt(val)) - if fc.pkgCtx.TypeOf(e).Underlying().(*types.Basic).Kind() == types.Int64 { + if fc.typeOf(e).Underlying().(*types.Basic).Kind() == types.Int64 { out.WriteString(strconv.FormatInt(int64(d)>>32, 10)) return } diff --git a/compiler/functions.go b/compiler/functions.go new file mode 100644 index 000000000..361c92f0f --- /dev/null +++ b/compiler/functions.go @@ -0,0 +1,355 @@ +package compiler + +// functions.go contains logic responsible for translating top-level functions +// and function literals. + +import ( + "bytes" + "errors" + "fmt" + "go/ast" + "go/types" + "sort" + "strings" + + "github.com/gopherjs/gopherjs/compiler/astutil" + "github.com/gopherjs/gopherjs/compiler/internal/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" + "github.com/gopherjs/gopherjs/compiler/typesutil" +) + +// nestedFunctionContext creates a new nested context for a function corresponding +// to the provided info and instance. 
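The contexts created below are keyed by typeparams.Instance, so a brief reminder of what an instance is may help; the example uses hypothetical names, and the comments note how it maps onto the Instance Object and TArgs fields seen elsewhere in this change.

// Each distinct instantiation of a generic function gets its own funcContext
// and its own JS function. For the hypothetical Max below, Max[int] and
// Max[string] are two separate instances sharing one *types.Func object.
func Max[T interface{ ~int | ~string }](a, b T) T {
	if a > b {
		return a
	}
	return b
}

var (
	_ = Max[int]    // Instance{Object: Max, TArgs: [int]}
	_ = Max[string] // Instance{Object: Max, TArgs: [string]}
)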
+func (fc *funcContext) nestedFunctionContext(info *analysis.FuncInfo, inst typeparams.Instance) *funcContext { + if info == nil { + panic(errors.New("missing *analysis.FuncInfo")) + } + if inst.Object == nil { + panic(errors.New("missing inst.Object")) + } + o := inst.Object.(*types.Func) + sig := o.Type().(*types.Signature) + + c := &funcContext{ + FuncInfo: info, + instance: inst, + pkgCtx: fc.pkgCtx, + parent: fc, + allVars: make(map[string]int, len(fc.allVars)), + localVars: []string{}, + flowDatas: map[*types.Label]*flowData{nil: {}}, + caseCounter: 1, + labelCases: make(map[*types.Label]int), + typeResolver: fc.typeResolver, + objectNames: map[types.Object]string{}, + sig: &typesutil.Signature{Sig: sig}, + } + for k, v := range fc.allVars { + c.allVars[k] = v + } + + if sig.TypeParams().Len() > 0 { + c.typeResolver = typeparams.NewResolver(c.pkgCtx.typesCtx, sig.TypeParams(), inst.TArgs, nil) + } else if sig.RecvTypeParams().Len() > 0 { + c.typeResolver = typeparams.NewResolver(c.pkgCtx.typesCtx, sig.RecvTypeParams(), inst.TArgs, nil) + } + if c.objectNames == nil { + c.objectNames = map[types.Object]string{} + } + + // Synthesize an identifier by which the function may reference itself. Since + // it appears in the stack trace, it's useful to include the receiver type in + // it. + funcRef := o.Name() + if recvType := typesutil.RecvType(sig); recvType != nil { + funcRef = recvType.Obj().Name() + midDot + funcRef + } + c.funcRef = c.newVariable(funcRef, true /*pkgLevel*/) + + return c +} + +// namedFuncContext creates a new funcContext for a named Go function +// (standalone or method). +func (fc *funcContext) namedFuncContext(inst typeparams.Instance) *funcContext { + info := fc.pkgCtx.FuncInfo(inst) + c := fc.nestedFunctionContext(info, inst) + + return c +} + +// literalFuncContext creates a new funcContext for a function literal. Since +// go/types doesn't generate *types.Func objects for function literals, we +// generate a synthetic one for it. +func (fc *funcContext) literalFuncContext(fun *ast.FuncLit) *funcContext { + info := fc.pkgCtx.FuncLitInfo(fun, fc.TypeArgs()) + sig := fc.pkgCtx.TypeOf(fun).(*types.Signature) + o := types.NewFunc(fun.Pos(), fc.pkgCtx.Pkg, fc.newLitFuncName(), sig) + inst := typeparams.Instance{Object: o} + + c := fc.nestedFunctionContext(info, inst) + return c +} + +// translateTopLevelFunction translates a top-level function declaration +// (standalone function or method) into a corresponding JS function. Must be +// called on the function context created for the function corresponding instance. +// +// Returns a string with JavaScript statements that define the function or +// method. For methods it returns declarations for both value- and +// pointer-receiver (if appropriate). +func (fc *funcContext) translateTopLevelFunction(fun *ast.FuncDecl) []byte { + if fun.Recv == nil { + return fc.translateStandaloneFunction(fun) + } + + return fc.translateMethod(fun) +} + +// translateStandaloneFunction translates a package-level function. +// +// It returns JS statements which define the corresponding function in a +// package context. Exported functions are also assigned to the `$pkg` object. 
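A sketch of what the translateStandaloneFunction below produces for an exported, non-generic function; the body placeholder and the exact function reference name are assumptions.

// standaloneFuncJS sketches the statements emitted for `func Hello() {}` in a
// package where the instance name assigned to it is Hello; illustrative only.
const standaloneFuncJS = `
	Hello = function Hello() {
	};
	$pkg.Hello = Hello;
`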
+func (fc *funcContext) translateStandaloneFunction(fun *ast.FuncDecl) []byte {
+	o := fc.instance.Object.(*types.Func)
+
+	if fun.Recv != nil {
+		panic(fmt.Errorf("expected standalone function, got method: %s", o))
+	}
+
+	lvalue := fc.instName(fc.instance)
+
+	if fun.Body == nil {
+		return []byte(fmt.Sprintf("\t%s = %s;\n", lvalue, fc.unimplementedFunction(o)))
+	}
+
+	body := fc.translateFunctionBody(fun.Type, nil, fun.Body)
+	code := bytes.NewBuffer(nil)
+	fmt.Fprintf(code, "\t%s = %s;\n", lvalue, body)
+	if fun.Name.IsExported() {
+		fmt.Fprintf(code, "\t$pkg.%s = %s;\n", encodeIdent(fun.Name.Name), lvalue)
+	}
+	return code.Bytes()
+}
+
+// translateMethod translates a method of a named type.
+//
+// It returns one or more JS statements which define the method. Methods with
+// a non-pointer receiver are automatically defined for the pointer-receiver type.
+func (fc *funcContext) translateMethod(fun *ast.FuncDecl) []byte {
+	o := fc.instance.Object.(*types.Func)
+	funName := fc.methodName(o)
+
+	// primaryFunction generates a JS function equivalent of the current Go function
+	// and assigns it to the JS expression defined by lvalue.
+	primaryFunction := func(lvalue string) []byte {
+		if fun.Body == nil {
+			return []byte(fmt.Sprintf("\t%s = %s;\n", lvalue, fc.unimplementedFunction(o)))
+		}
+
+		var recv *ast.Ident
+		if fun.Recv != nil && fun.Recv.List[0].Names != nil {
+			recv = fun.Recv.List[0].Names[0]
+		}
+		fun := fc.translateFunctionBody(fun.Type, recv, fun.Body)
+		return []byte(fmt.Sprintf("\t%s = %s;\n", lvalue, fun))
+	}
+
+	recvInst := fc.instance.Recv()
+	recvInstName := fc.instName(recvInst)
+	recvType := recvInst.Object.Type().(*types.Named)
+
+	// Objects the method should be assigned to for the plain and pointer type
+	// of the receiver.
+	prototypeVar := fmt.Sprintf("%s.prototype.%s", recvInstName, funName)
+	ptrPrototypeVar := fmt.Sprintf("$ptrType(%s).prototype.%s", recvInstName, funName)
+
+	// Methods with a pointer receiver are only attached to the pointer-receiver type.
+	if _, isPointer := fc.sig.Sig.Recv().Type().(*types.Pointer); isPointer {
+		return primaryFunction(ptrPrototypeVar)
+	}
+
+	// Methods with non-pointer receivers must be defined both for the pointer
+	// and non-pointer types. To minimize generated code size, we generate a
+	// complete implementation for only one receiver (non-pointer for most types)
+	// and define a proxy function on the other, which converts the receiver type
+	// and forwards the call to the primary implementation.
+	proxyFunction := func(lvalue, receiver string) []byte {
+		fun := fmt.Sprintf("function(...$args) { return %s.%s(...$args); }", receiver, funName)
+		return []byte(fmt.Sprintf("\t%s = %s;\n", lvalue, fun))
+	}
+
+	// Structs are a special case: they are represented by JS objects and their
+	// methods are the underlying object's methods. Due to the reference semantics
+	// of JS variables, the actual backing object is considered to represent the
+	// pointer-to-struct type, and methods are attached to it first and foremost.
+	if _, isStruct := recvType.Underlying().(*types.Struct); isStruct {
+		code := bytes.Buffer{}
+		code.Write(primaryFunction(ptrPrototypeVar))
+		code.Write(proxyFunction(prototypeVar, "this.$val"))
+		return code.Bytes()
+	}
+
+	// Methods defined for a non-pointer receiver are attached to both pointer- and
+	// non-pointer-receiver types.
+ proxyRecvExpr := "this.$get()" + if isWrapped(recvType) { + proxyRecvExpr = fmt.Sprintf("new %s(%s)", recvInstName, proxyRecvExpr) + } + code := bytes.Buffer{} + code.Write(primaryFunction(prototypeVar)) + code.Write(proxyFunction(ptrPrototypeVar, proxyRecvExpr)) + return code.Bytes() +} + +// unimplementedFunction returns a JS function expression for a Go function +// without a body, which would throw an exception if called. +// +// In Go such functions are either used with a //go:linkname directive or with +// assembler intrinsics, only former of which is supported by GopherJS. +func (fc *funcContext) unimplementedFunction(o *types.Func) string { + return fmt.Sprintf("function() {\n\t\t$throwRuntimeError(\"native function not implemented: %s\");\n\t}", o.FullName()) +} + +// translateFunctionBody translates body of a top-level or literal function. +// +// It returns a JS function expression that represents the given Go function. +// Function receiver must have been created with nestedFunctionContext() to have +// required metadata set up. +func (fc *funcContext) translateFunctionBody(typ *ast.FuncType, recv *ast.Ident, body *ast.BlockStmt) string { + prevEV := fc.pkgCtx.escapingVars + + // Generate a list of function argument variables. Since Go allows nameless + // arguments, we have to generate synthetic names for their JS counterparts. + var args []string + for _, param := range typ.Params.List { + if len(param.Names) == 0 { + args = append(args, fc.newLocalVariable("param")) + continue + } + for _, ident := range param.Names { + if isBlank(ident) { + args = append(args, fc.newLocalVariable("param")) + continue + } + args = append(args, fc.objectName(fc.pkgCtx.Defs[ident])) + } + } + + bodyOutput := string(fc.CatchOutput(1, func() { + if fc.IsBlocking() { + fc.pkgCtx.Scopes[body] = fc.pkgCtx.Scopes[typ] + fc.handleEscapingVars(body) + } + + if fc.sig != nil && fc.sig.HasNamedResults() { + fc.resultNames = make([]ast.Expr, fc.sig.Sig.Results().Len()) + for i := 0; i < fc.sig.Sig.Results().Len(); i++ { + result := fc.sig.Sig.Results().At(i) + typ := fc.typeResolver.Substitute(result.Type()) + fc.Printf("%s = %s;", fc.objectName(result), fc.translateExpr(fc.zeroValue(typ)).String()) + id := ast.NewIdent("") + fc.pkgCtx.Uses[id] = result + fc.resultNames[i] = fc.setType(id, typ) + } + } + + if recv != nil && !isBlank(recv) { + this := "this" + if isWrapped(fc.typeOf(recv)) { + this = "this.$val" // Unwrap receiver value. + } + fc.Printf("%s = %s;", fc.translateExpr(recv), this) + } + + fc.translateStmtList(body.List) + if len(fc.Flattened) != 0 && !astutil.EndsWithReturn(body.List) { + fc.translateStmt(&ast.ReturnStmt{}, nil) + } + })) + + sort.Strings(fc.localVars) + + var prefix, suffix string + + if len(fc.Flattened) != 0 { + // $s contains an index of the switch case a blocking function reached + // before getting blocked. When execution resumes, it will allow to continue + // from where we left off. + fc.localVars = append(fc.localVars, "$s") + prefix = prefix + " $s = $s || 0;" + } + + if fc.HasDefer { + fc.localVars = append(fc.localVars, "$deferred") + suffix = " }" + suffix + if fc.IsBlocking() { + suffix = " }" + suffix + } + } + + localVarDefs := "" // Function-local var declaration at the top. + + if fc.IsBlocking() { + localVars := append([]string{}, fc.localVars...) + // There are several special variables involved in handling blocking functions: + // $r is sometimes used as a temporary variable to store blocking call result. 
+ // $c indicates that a function is being resumed after a blocking call when set to true. + // $f is an object used to save and restore function context for blocking calls. + localVars = append(localVars, "$r") + // funcRef identifies the function object itself, so it doesn't need to be saved + // or restored. + localVars = removeMatching(localVars, fc.funcRef) + // If a blocking function is being resumed, initialize local variables from the saved context. + localVarDefs = fmt.Sprintf("var {%s, $c} = $restore(this, {%s});\n", strings.Join(localVars, ", "), strings.Join(args, ", ")) + // If the function gets blocked, save local variables for future. + saveContext := fmt.Sprintf("var $f = {$blk: "+fc.funcRef+", $c: true, $r, %s};", strings.Join(fc.localVars, ", ")) + + suffix = " " + saveContext + "return $f;" + suffix + } else if len(fc.localVars) > 0 { + // Non-blocking functions simply declare local variables with no need for restore support. + localVarDefs = fmt.Sprintf("var %s;\n", strings.Join(fc.localVars, ", ")) + } + + if fc.HasDefer { + prefix = prefix + " var $err = null; try {" + deferSuffix := " } catch(err) { $err = err;" + if fc.IsBlocking() { + deferSuffix += " $s = -1;" + } + if fc.resultNames == nil && fc.sig.HasResults() { + deferSuffix += fmt.Sprintf(" return%s;", fc.translateResults(nil)) + } + deferSuffix += " } finally { $callDeferred($deferred, $err);" + if fc.resultNames != nil { + deferSuffix += fmt.Sprintf(" if (!$curGoroutine.asleep) { return %s; }", fc.translateResults(fc.resultNames)) + } + if fc.IsBlocking() { + deferSuffix += " if($curGoroutine.asleep) {" + } + suffix = deferSuffix + suffix + } + + if len(fc.Flattened) != 0 { + prefix = prefix + " s: while (true) { switch ($s) { case 0:" + suffix = " } return; }" + suffix + } + + if fc.HasDefer { + prefix = prefix + " $deferred = []; $curGoroutine.deferStack.push($deferred);" + } + + if prefix != "" { + bodyOutput = fc.Indentation(1) + "/* */" + prefix + "\n" + bodyOutput + } + if suffix != "" { + bodyOutput = bodyOutput + fc.Indentation(1) + "/* */" + suffix + "\n" + } + if localVarDefs != "" { + bodyOutput = fc.Indentation(1) + localVarDefs + bodyOutput + } + + fc.pkgCtx.escapingVars = prevEV + + return fmt.Sprintf("function %s(%s) {\n%s%s}", fc.funcRef, strings.Join(args, ", "), bodyOutput, fc.Indentation(0)) +} diff --git a/compiler/analysis/bool.go b/compiler/internal/analysis/bool.go similarity index 100% rename from compiler/analysis/bool.go rename to compiler/internal/analysis/bool.go diff --git a/compiler/analysis/break.go b/compiler/internal/analysis/break.go similarity index 100% rename from compiler/analysis/break.go rename to compiler/internal/analysis/break.go diff --git a/compiler/internal/analysis/defer.go b/compiler/internal/analysis/defer.go new file mode 100644 index 000000000..5d4f151a3 --- /dev/null +++ b/compiler/internal/analysis/defer.go @@ -0,0 +1,101 @@ +package analysis + +import ( + "go/ast" + "go/types" + + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" + "github.com/gopherjs/gopherjs/compiler/typesutil" +) + +// deferStmt represents a defer statement that is blocking or not. +// +// A blocking defer statement will cause a return statement to be blocking +// since the defer is called and potentially blocked while leaving the method. +// We try to determine which defers affect which returns so that we only +// mark returns as blocking if they are affected by a blocking defer. 
+// In general we know that a defer will affect all returns that have been
+// declared after the defer statement.
+//
+// Since analysis doesn't create [CFG] basic blocks for full control
+// flow analysis, we can't easily determine several cases:
+//
+//   - Terminating if-statements (i.e. does the body of the if-statement always
+//     return from the method) are difficult to determine. Any defer that is
+//     added whilst inside a terminating if-statement body can only affect the
+//     returns inside that if-statement body.
+//     Otherwise, the defer may affect returns after the if-statement block has
+//     rejoined the flow that it branched from. Since terminating if-statements
+//     are difficult to determine without [CFG] blocks, we treat all
+//     if-statements as if they are not terminating.
+//     That means there may be some false positives, since returns declared
+//     after a terminating branch will be marked as affected by a defer
+//     declared in that branch, when in reality they are not.
+//
+//   - Same as above but for else blocks, switch cases, and any branching.
+//
+//   - Loops (i.e. for-statements and for-range-statements) can cause return
+//     statements declared earlier in the loop to be affected by defers
+//     declared after them in the loop. We can't determine which branches in a
+//     loop may return to the start of the loop, so we assume anywhere inside
+//     a loop can return to the start of the loop.
+//     To handle this, all defers defined anywhere within a loop are assumed
+//     to affect any return also defined in that loop.
+//     We only need to track the top-level loop since nested loops will be
+//     superseded by the top-level loop.
+//
+//   - Labels and gotos are similar to loops in [CFG] blocks, but without those
+//     blocks it's harder to determine which defers will affect which returns.
+//     To be safe, for any function with any blocking defers, returns, and
+//     gotos, all the returns are defaulted to blocking.
+//
+// [CFG]: https://en.wikipedia.org/wiki/Control-flow_graph
+type deferStmt struct {
+	obj      types.Object
+	lit      *ast.FuncLit
+	typeArgs typesutil.TypeList
+}
+
+// newBlockingDefer creates a new defer statement that is blocking.
+//
+// If the defer is calling a js.Object method then the defer is non-blocking.
+// If the defer is calling an interface method or a function pointer stored in
+// a var then the defer is blocking.
+func newBlockingDefer() *deferStmt {
+	return &deferStmt{}
+}
+
+// newInstDefer creates a new defer statement for an instance of a method.
+// The instance is used to look up the blocking information later.
+func newInstDefer(inst typeparams.Instance) *deferStmt {
+	return &deferStmt{obj: inst.Object, typeArgs: inst.TArgs}
+}
+
+// newLitDefer creates a new defer statement for a function literal.
+// The literal is used to look up the blocking information later.
+func newLitDefer(lit *ast.FuncLit, typeArgs typesutil.TypeList) *deferStmt {
+	return &deferStmt{lit: lit, typeArgs: typeArgs}
+}
+
+// IsBlocking determines if the defer statement is blocking or not.
+func (d *deferStmt) IsBlocking(info *Info) bool {
+	// If the object or the literal is set then we can look up the blocking
+	// information; otherwise assume the defer is blocking, since a deferStmt
+	// without either is only recorded for calls treated as blocking.
+ if d.obj != nil { + return info.IsBlocking(typeparams.Instance{Object: d.obj, TArgs: d.typeArgs}) + } + if d.lit != nil { + return info.FuncLitInfo(d.lit, d.typeArgs).IsBlocking() + } + return true +} + +func isAnyDeferBlocking(deferStmts []*deferStmt, info *Info) bool { + for _, def := range deferStmts { + if def.IsBlocking(info) { + return true + } + } + return false +} diff --git a/compiler/analysis/escape.go b/compiler/internal/analysis/escape.go similarity index 100% rename from compiler/analysis/escape.go rename to compiler/internal/analysis/escape.go diff --git a/compiler/internal/analysis/info.go b/compiler/internal/analysis/info.go new file mode 100644 index 000000000..e400c870c --- /dev/null +++ b/compiler/internal/analysis/info.go @@ -0,0 +1,714 @@ +package analysis + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + + "github.com/gopherjs/gopherjs/compiler/astutil" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" + "github.com/gopherjs/gopherjs/compiler/typesutil" +) + +type continueStmt struct { + forStmt *ast.ForStmt + analyzeStack astPath +} + +func newContinueStmt(forStmt *ast.ForStmt, stack astPath) continueStmt { + cs := continueStmt{ + forStmt: forStmt, + analyzeStack: stack.copy(), + } + return cs +} + +// astPath is a list of AST nodes where each previous node is a parent of the +// next node. +type astPath []ast.Node + +func (src astPath) copy() astPath { + dst := make(astPath, len(src)) + copy(dst, src) + return dst +} + +func (ap astPath) String() string { + s := &strings.Builder{} + s.WriteString("[") + for i, n := range ap { + if i > 0 { + s.WriteString(", ") + } + fmt.Fprintf(s, "%T(%p)", n, n) + } + s.WriteString("]") + return s.String() +} + +type Info struct { + *types.Info + Pkg *types.Package + typeCtx *types.Context + InstanceSets *typeparams.PackageInstanceSets + HasPointer map[*types.Var]bool + funcInstInfos *typeparams.InstanceMap[*FuncInfo] + funcLitInfos map[*ast.FuncLit][]*FuncInfo + InitFuncInfo *FuncInfo // Context for package variable initialization. + + infoImporter InfoImporter // To get `Info` for other packages. + allInfos []*FuncInfo +} + +// InfoImporter is used to get the `Info` for another package. +// The path is the resolved import path of the package to get the `Info` for. +type InfoImporter func(path string) (*Info, error) + +func (info *Info) newFuncInfo(n ast.Node, obj types.Object, typeArgs typesutil.TypeList, resolver *typeparams.Resolver) *FuncInfo { + funcInfo := &FuncInfo{ + pkgInfo: info, + Flattened: make(map[ast.Node]bool), + Blocking: make(map[ast.Node]bool), + GotoLabel: make(map[*types.Label]bool), + loopReturnIndex: -1, + instCallees: new(typeparams.InstanceMap[[]astPath]), + literalFuncCallees: make(map[*ast.FuncLit]astPath), + typeArgs: typeArgs, + resolver: resolver, + } + + // Register the function in the appropriate map. + switch n := n.(type) { + case *ast.FuncDecl: + if n.Body == nil { + // Function body comes from elsewhere (for example, from a go:linkname + // directive), conservatively assume that it may be blocking. + // TODO(nevkontakte): It is possible to improve accuracy of this detection. + // Since GopherJS supports only "import-style" go:linkname, at this stage + // the compiler already determined whether the implementation function is + // blocking, and we could check that. 
+			funcInfo.Blocking[n] = true
+		}
+
+		if obj == nil {
+			obj = info.Defs[n.Name]
+		}
+		inst := typeparams.Instance{Object: obj, TArgs: typeArgs}
+		info.funcInstInfos.Set(inst, funcInfo)
+
+	case *ast.FuncLit:
+		info.funcLitInfos[n] = append(info.funcLitInfos[n], funcInfo)
+	}
+
+	// And add it to the list of all functions.
+	info.allInfos = append(info.allInfos, funcInfo)
+
+	return funcInfo
+}
+
+func (info *Info) newFuncInfoInstances(fd *ast.FuncDecl) []*FuncInfo {
+	obj := info.Defs[fd.Name]
+	instances := info.InstanceSets.Pkg(info.Pkg).ForObj(obj)
+	if len(instances) == 0 {
+		if typeparams.HasTypeParams(obj.Type()) {
+			// This is a generic function, but no instances were found;
+			// it is unused, so skip over it.
+			return []*FuncInfo{}
+		}
+
+		// No instances found and this is a non-generic function.
+		return []*FuncInfo{info.newFuncInfo(fd, nil, nil, nil)}
+	}
+
+	funcInfos := make([]*FuncInfo, 0, len(instances))
+	for _, inst := range instances {
+		var resolver *typeparams.Resolver
+		if sig, ok := obj.Type().(*types.Signature); ok {
+			tp := typeparams.SignatureTypeParams(sig)
+			resolver = typeparams.NewResolver(info.typeCtx, tp, inst.TArgs, nil)
+		}
+		fi := info.newFuncInfo(fd, inst.Object, inst.TArgs, resolver)
+		funcInfos = append(funcInfos, fi)
+	}
+	return funcInfos
+}
+
+// IsBlocking returns true if the function may contain blocking calls or operations.
+// If inst is from a different package, this will use the infoImporter
+// to look up the information from the other package.
+func (info *Info) IsBlocking(inst typeparams.Instance) bool {
+	if inst.Object.Pkg() != info.Pkg {
+		path := inst.Object.Pkg().Path()
+		otherInfo, err := info.infoImporter(path)
+		if err != nil {
+			panic(fmt.Errorf(`failed to get info for package %q: %v`, path, err))
+		}
+		return otherInfo.IsBlocking(inst)
+	}
+	if funInfo := info.FuncInfo(inst); funInfo != nil {
+		return funInfo.IsBlocking()
+	}
+	panic(fmt.Errorf(`info did not have function declaration instance for %q`, inst.TypeString()))
+}
+
+// FuncInfo returns information about the given function declaration instance, or nil if not found.
+func (info *Info) FuncInfo(inst typeparams.Instance) *FuncInfo {
+	return info.funcInstInfos.Get(inst)
+}
+
+// FuncLitInfo returns information about the given function literal, or nil if not found.
+// The given type arguments are used to identify the correct instance of the
+// function literal in the case that the literal was defined inside a generic function.
+func (info *Info) FuncLitInfo(fun *ast.FuncLit, typeArgs typesutil.TypeList) *FuncInfo {
+	lits := info.funcLitInfos[fun]
+	for _, fi := range lits {
+		if fi.typeArgs.Equal(typeArgs) {
+			return fi
+		}
+	}
+	return nil
+}
+
+// VarsWithInitializers returns a set of package-level variables that have
+// explicit initializers.
+func (info *Info) VarsWithInitializers() map[*types.Var]bool {
+	result := map[*types.Var]bool{}
+	for _, init := range info.InitOrder {
+		for _, o := range init.Lhs {
+			result[o] = true
+		}
+	}
+	return result
+}
+
+// AnalyzePkg analyzes the given package for blocking calls, defers, etc.
+//
+// Note that at the end of this call the analysis information
+// has NOT been propagated across packages yet. Once all the packages
+// have been analyzed, call PropagateAnalysis to propagate the information.
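+//
+// A simplified usage sketch (the caller-side names here, such as pkgs, fset,
+// typeCtx, and the per-package fields, are hypothetical and not part of this
+// API):
+//
+//	infos := map[string]*Info{}
+//	importer := func(path string) (*Info, error) { return infos[path], nil }
+//	var all []*Info
+//	for _, pkg := range pkgs { // packages in dependency order
+//		info := AnalyzePkg(pkg.Files, fset, pkg.TypesInfo, typeCtx, pkg.Types, pkg.InstanceSets, importer)
+//		infos[pkg.Path()] = info
+//		all = append(all, info)
+//	}
+//	PropagateAnalysis(all)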
+func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typeCtx *types.Context, typesPkg *types.Package, instanceSets *typeparams.PackageInstanceSets, infoImporter InfoImporter) *Info {
+	info := &Info{
+		Info:          typesInfo,
+		Pkg:           typesPkg,
+		typeCtx:       typeCtx,
+		InstanceSets:  instanceSets,
+		HasPointer:    make(map[*types.Var]bool),
+		infoImporter:  infoImporter,
+		funcInstInfos: new(typeparams.InstanceMap[*FuncInfo]),
+		funcLitInfos:  make(map[*ast.FuncLit][]*FuncInfo),
+	}
+	info.InitFuncInfo = info.newFuncInfo(nil, nil, nil, nil)
+
+	// Traverse the full AST of the package and collect information about existing
+	// functions.
+	for _, file := range files {
+		ast.Walk(info.InitFuncInfo, file)
+	}
+
+	return info
+}
+
+// PropagateAnalysis will propagate analysis information across package
+// boundaries to finish the analysis of a whole project.
+func PropagateAnalysis(allInfo []*Info) {
+	done := false
+	for !done {
+		done = true
+		for _, info := range allInfo {
+			if !info.propagateFunctionBlocking() {
+				done = false
+			}
+		}
+	}
+
+	for _, info := range allInfo {
+		info.propagateControlStatementBlocking()
+	}
+}
+
+// propagateFunctionBlocking propagates information about blocking calls
+// to the caller functions. Returns true if done, false if more iterations
+// are needed.
+//
+// For each function we check all other functions it may call and if any of
+// them are blocking, we mark the caller blocking as well. The process is
+// repeated until no new blocking functions are detected.
+func (info *Info) propagateFunctionBlocking() bool {
+	done := true
+	for _, caller := range info.allInfos {
+		// Check calls to named functions and function-typed variables.
+		caller.instCallees.Iterate(func(callee typeparams.Instance, callSites []astPath) {
+			if info.IsBlocking(callee) {
+				for _, callSite := range callSites {
+					caller.markBlocking(callSite)
+				}
+				caller.instCallees.Delete(callee)
+				done = false
+			}
+		})
+
+		// Check direct calls to function literals.
+		for callee, callSite := range caller.literalFuncCallees {
+			if info.FuncLitInfo(callee, caller.typeArgs).IsBlocking() {
+				caller.markBlocking(callSite)
+				delete(caller.literalFuncCallees, callee)
+				done = false
+			}
+		}
+	}
+	return done
+}
+
+// propagateControlStatementBlocking is called after all function blocking
+// information has been propagated. It marks flow control statements as
+// blocking whenever they may lead to a blocking function call.
+func (info *Info) propagateControlStatementBlocking() {
+	for _, funcInfo := range info.allInfos {
+		funcInfo.propagateReturnBlocking()
+		funcInfo.propagateContinueBlocking()
+	}
+}
+
+type FuncInfo struct {
+	// HasDefer indicates if any defer statement exists in the function.
+	HasDefer bool
+	// Nodes are "flattened" into a switch-case statement when we need to be able
+	// to jump into an arbitrary position in the code with a GOTO statement, or
+	// resume a goroutine after a blocking call unblocks.
+	Flattened map[ast.Node]bool
+	// Blocking indicates that either the AST node itself or its descendant may
+	// block goroutine execution (for example, a channel operation).
+	Blocking map[ast.Node]bool
+	// GotoLabel indicates a label referenced by a goto statement, rather than a
+	// named loop.
+	GotoLabel map[*types.Label]bool
+	// List of continue statements in the function.
+	continueStmts []continueStmt
+	// List of return statements in the function.
+	returnStmts []returnStmt
+	// List of deferred function calls which could be blocking.
+ // This is built up as the function is analyzed so that we can mark all + // return statements with the defers that each return would need to call. + deferStmts []*deferStmt + // The index of the return statement that was analyzed prior to a top-level + // loop starting. This is used to determine which return statements + // were added within the loop so that they can be updated to reflect all + // the defers that were added anywhere inside the loop. This is because + // returns defined before any defers in a loop may still be affected by + // those defers because of the loop. See comment on [deferStmt]. + loopReturnIndex int + // List of other named functions in the current package or another package + // that this function calls. + // If any of them are blocking, this function will become blocking too. + instCallees *typeparams.InstanceMap[[]astPath] + // List of function literals directly called from this function (for example: + // `func() { /* do stuff */ }()`). This is distinct from function literals + // assigned to named variables (for example: `doStuff := func() {}; + // doStuff()`), which are handled by localInstCallees. If any of them are + // identified as blocking, this function will become blocking too. + literalFuncCallees map[*ast.FuncLit]astPath + // typeArgs are the type arguments for the function instance. + typeArgs typesutil.TypeList + // resolver is used by this function instance to resolve any type arguments + // for internal function calls. + // This may be nil if not an instance of a generic function. + resolver *typeparams.Resolver + + pkgInfo *Info // Function's parent package. + visitorStack astPath +} + +// IsBlocking indicates if this function may block goroutine execution. +// +// For example, a channel operation in a function or a call to another +// possibly blocking function may block the function. +func (fi *FuncInfo) IsBlocking() bool { + return fi == nil || len(fi.Blocking) != 0 +} + +// TypeArgs gets the type arguments of this inside of a function instance +// or empty if not in a function instance. +func (fi *FuncInfo) TypeArgs() typesutil.TypeList { + return fi.typeArgs +} + +// propagateReturnBlocking updates the blocking on the return statements. +// See comment on [deferStmt]. +// +// This should only be called once when finishing analysis and only after +// all functions have been analyzed and all blocking information has been +// propagated across functions. +func (fi *FuncInfo) propagateReturnBlocking() { + if len(fi.GotoLabel) > 0 { + // If there are any goto statements in the function then + // all the return statements are marked the same. + // If any defer is blocking, then all return statements are blocking. + if isAnyDeferBlocking(fi.deferStmts, fi.pkgInfo) { + for _, returnStmt := range fi.returnStmts { + fi.markBlocking(returnStmt.analyzeStack) + } + } + return + } + + for _, returnStmt := range fi.returnStmts { + // Check all the defer statements that affect the return statement, + // if any are blocking then the return statement is blocking. + if returnStmt.IsBlocking(fi) { + fi.markBlocking(returnStmt.analyzeStack) + } + } +} + +// propagateContinueBlocking updates the blocking on the continue statements. +// +// This should only be called once when finishing analysis and only after +// all functions have been analyzed and all blocking information has been +// propagated across functions. 
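+//
+// For example (an illustrative snippet, not taken from the tests), in
+//
+//	for i := 0; i < n; i = <-c {
+//		if i%2 == 0 {
+//			continue // treated as blocking: it runs the post-statement `i = <-c`.
+//		}
+//	}
+//
+// the continue statement is marked blocking because the loop's post-statement
+// receives from a channel.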
+func (fi *FuncInfo) propagateContinueBlocking() { + for _, continueStmt := range fi.continueStmts { + if fi.Blocking[continueStmt.forStmt.Post] { + // If a for-loop post-expression is blocking, the continue statement + // that leads to it must be treated as blocking. + fi.markBlocking(continueStmt.analyzeStack) + } + } +} + +func (fi *FuncInfo) Visit(node ast.Node) ast.Visitor { + if node == nil { + if len(fi.visitorStack) != 0 { + fi.visitorStack = fi.visitorStack[:len(fi.visitorStack)-1] + } + return nil + } + fi.visitorStack = append(fi.visitorStack, node) + + switch n := node.(type) { + case *ast.FuncDecl: + // Analyze all the instances of the function declarations + // in their own context with their own type arguments. + fis := fi.pkgInfo.newFuncInfoInstances(n) + if n.Body != nil { + for _, fi := range fis { + ast.Walk(fi, n.Body) + } + } + return nil + case *ast.FuncLit: + // Analyze the function literal in its own context. + return fi.pkgInfo.newFuncInfo(n, nil, fi.typeArgs, fi.resolver) + case *ast.BranchStmt: + switch n.Tok { + case token.GOTO: + // Emulating GOTO in JavaScript requires the code to be flattened into a + // switch-statement. + fi.markFlattened(fi.visitorStack) + fi.GotoLabel[fi.pkgInfo.Uses[n.Label].(*types.Label)] = true + case token.CONTINUE: + loopStmt := astutil.FindLoopStmt(fi.visitorStack, n, fi.pkgInfo.Info) + if forStmt, ok := (loopStmt).(*ast.ForStmt); ok { + // In `for x; y; z { ... }` loops `z` may be potentially blocking + // and therefore continue expression that triggers it would have to + // be treated as blocking. + fi.continueStmts = append(fi.continueStmts, newContinueStmt(forStmt, fi.visitorStack)) + } + } + return fi + case *ast.CallExpr: + return fi.visitCallExpr(n, false) + case *ast.SendStmt: + // Sending into a channel is blocking. + fi.markBlocking(fi.visitorStack) + return fi + case *ast.UnaryExpr: + switch n.Op { + case token.AND: + if id, ok := astutil.RemoveParens(n.X).(*ast.Ident); ok { + fi.pkgInfo.HasPointer[fi.pkgInfo.Uses[id].(*types.Var)] = true + } + case token.ARROW: + // Receiving from a channel is blocking. + fi.markBlocking(fi.visitorStack) + } + return fi + case *ast.RangeStmt: + if _, ok := fi.pkgInfo.TypeOf(n.X).Underlying().(*types.Chan); ok { + // for-range loop over a channel is blocking. + fi.markBlocking(fi.visitorStack) + } + if fi.loopReturnIndex >= 0 { + // Already in a loop so just continue walking. + return fi + } + // Top-level for-loop, analyze it separately to be able to update + // returns with the defers that were added inside the loop. + // See comment on deferStmt. + fi.loopReturnIndex = len(fi.returnStmts) + // Analyze the for-loop's children. + ast.Walk(skipParentNode{then: fi}, n) + // After the for-loop is analyzed, update all return statements that + // were inside the loop with the resulting list of defer statements. + for i := fi.loopReturnIndex; i < len(fi.returnStmts); i++ { + fi.returnStmts[i].deferStmts = fi.deferStmts + } + fi.loopReturnIndex = -1 + return nil + case *ast.ForStmt: + if fi.loopReturnIndex >= 0 { + // Already in a loop so just continue walking. + return fi + } + // Top-level for-loop, analyze it separately to be able to update + // returns with the defers that were added inside the loop. + // See comment on deferStmt. + fi.loopReturnIndex = len(fi.returnStmts) + // Analyze the for-loop's children. + ast.Walk(skipParentNode{then: fi}, n) + // After the for-loop is analyzed, update all return statements that + // were inside the loop with the resulting list of defer statements. 
+ for i := fi.loopReturnIndex; i < len(fi.returnStmts); i++ { + fi.returnStmts[i].deferStmts = fi.deferStmts + } + fi.loopReturnIndex = -1 + return nil + case *ast.SelectStmt: + for _, s := range n.Body.List { + if s.(*ast.CommClause).Comm == nil { // default clause + return fi + } + } + // Select statements without a default case are blocking. + fi.markBlocking(fi.visitorStack) + return fi + case *ast.CommClause: + // FIXME(nevkontakte): Does this need to be manually spelled out? Presumably + // ast.Walk would visit all those nodes anyway, and we are not creating any + // new contexts here. + // https://github.com/gopherjs/gopherjs/issues/230 seems to be relevant? + switch comm := n.Comm.(type) { + case *ast.SendStmt: + ast.Walk(fi, comm.Chan) + ast.Walk(fi, comm.Value) + case *ast.ExprStmt: + ast.Walk(fi, comm.X.(*ast.UnaryExpr).X) + case *ast.AssignStmt: + ast.Walk(fi, comm.Rhs[0].(*ast.UnaryExpr).X) + } + for _, s := range n.Body { + ast.Walk(fi, s) + } + return nil // The subtree was manually checked, no need to visit it again. + case *ast.GoStmt: + // Unlike a regular call, the function in a go statement doesn't block the + // caller goroutine, but the expression that determines the function and its + // arguments still need to be checked. + ast.Walk(fi, n.Call.Fun) + for _, arg := range n.Call.Args { + ast.Walk(fi, arg) + } + return nil // The subtree was manually checked, no need to visit it again. + case *ast.DeferStmt: + fi.HasDefer = true + return fi.visitCallExpr(n.Call, true) + case *ast.ReturnStmt: + // Capture all return statements in the function. They could become blocking + // if the function has a blocking deferred call. + rs := newReturnStmt(fi.visitorStack, fi.deferStmts) + fi.returnStmts = append(fi.returnStmts, rs) + return fi + default: + return fi + } + // Deliberately no return here to make sure that each of the cases above is + // self-sufficient and explicitly decides in which context the its AST subtree + // needs to be analyzed. +} + +func (fi *FuncInfo) visitCallExpr(n *ast.CallExpr, deferredCall bool) ast.Visitor { + switch f := astutil.RemoveParens(n.Fun).(type) { + case *ast.Ident: + fi.callToNamedFunc(fi.instanceForIdent(f), deferredCall) + return fi + case *ast.SelectorExpr: + if sel := fi.pkgInfo.Selections[f]; sel != nil { + if typesutil.IsJsObject(sel.Recv()) { + // js.Object methods are known to be non-blocking, + // but we still must check its arguments. + // We don't need to add a deferStmt when `deferredCall` + // is true, since that defer will always be non-blocking. + return fi + } + // selection is a method call like `foo.Bar()`, where `foo` might + // be generic and needs to be substituted with the type argument. + fi.callToNamedFunc(fi.instanceForSelection(sel), deferredCall) + return fi + } + + fi.callToNamedFunc(fi.instanceForIdent(f.Sel), deferredCall) + return fi + case *ast.FuncLit: + // Collect info about the function literal itself. + ast.Walk(fi, n.Fun) + + // Check all argument expressions. + for _, arg := range n.Args { + ast.Walk(fi, arg) + } + // Register literal function call site in case it is identified as blocking. + fi.literalFuncCallees[f] = fi.visitorStack.copy() + if deferredCall { + fi.deferStmts = append(fi.deferStmts, newLitDefer(f, fi.typeArgs)) + } + return nil // No need to walk under this CallExpr, we already did it manually. + case *ast.IndexExpr: + // Collect info about the instantiated type or function, or index expression. 
+ if astutil.IsTypeExpr(f, fi.pkgInfo.Info) { + // This is a type conversion to an instance of a generic type, + // not a call. Type assertion itself is not blocking, but we will + // visit the input expression. + return fi + } + if astutil.IsTypeExpr(f.Index, fi.pkgInfo.Info) { + // This is a call of an instantiation of a generic function, + // e.g. `foo[int]` in `func foo[T any]() { ... }; func main() { foo[int]() }` + fi.callToNamedFunc(fi.instanceForIdent(f.X.(*ast.Ident)), deferredCall) + return fi + } + // The called function is gotten with an index or key from a map, array, or slice. + // e.g. `m := map[string]func(){}; m["key"]()`, `s := []func(); s[0]()`. + // Since we can't predict if the returned function will be blocking + // or not, we have to be conservative and assume that function might be blocking. + fi.markBlocking(fi.visitorStack) + if deferredCall { + fi.deferStmts = append(fi.deferStmts, newBlockingDefer()) + } + return fi + case *ast.IndexListExpr: + // Collect info about the instantiated type or function. + if astutil.IsTypeExpr(f, fi.pkgInfo.Info) { + // This is a type conversion to an instance of a generic type, + // not a call. Type assertion itself is not blocking, but we will + // visit the input expression. + return fi + } + // This is a call of an instantiation of a generic function, + // e.g. `foo[int, bool]` in `func foo[T1, T2 any]() { ... }; func main() { foo[int, bool]() }` + fi.callToNamedFunc(fi.instanceForIdent(f.X.(*ast.Ident)), deferredCall) + return fi + default: + if astutil.IsTypeExpr(f, fi.pkgInfo.Info) { + // This is a type conversion, not a call. Type assertion itself is not + // blocking, but we will visit the input expression. + return fi + } + // The function is returned by a non-trivial expression. We have to be + // conservative and assume that function might be blocking. + fi.markBlocking(fi.visitorStack) + if deferredCall { + fi.deferStmts = append(fi.deferStmts, newBlockingDefer()) + } + return fi + } +} + +func (fi *FuncInfo) instanceForIdent(fnId *ast.Ident) typeparams.Instance { + tArgs := fi.pkgInfo.Info.Instances[fnId].TypeArgs + return typeparams.Instance{ + Object: fi.pkgInfo.Uses[fnId], + TArgs: fi.resolver.SubstituteAll(tArgs), + } +} + +func (fi *FuncInfo) instanceForSelection(sel *types.Selection) typeparams.Instance { + if _, ok := sel.Obj().Type().(*types.Signature); ok { + // Substitute the selection to ensure that the receiver has the correct + // type arguments propagated down from the caller. + resolved := fi.resolver.SubstituteSelection(sel) + sig := resolved.Obj().Type().(*types.Signature) + + // Using the substituted receiver type, find the instance of this call. + // This does require looking up the original method in the receiver type + // that may or may not have been the receiver prior to the substitution. 
+ if recv := sig.Recv(); recv != nil { + typ := recv.Type() + if ptrType, ok := typ.(*types.Pointer); ok { + typ = ptrType.Elem() + } + + if rt, ok := typ.(*types.Named); ok { + origMethod, _, _ := types.LookupFieldOrMethod(rt.Origin(), true, rt.Obj().Pkg(), resolved.Obj().Name()) + if origMethod == nil { + panic(fmt.Errorf(`failed to lookup field %q in type %v`, resolved.Obj().Name(), rt.Origin())) + } + return typeparams.Instance{ + Object: origMethod, + TArgs: fi.resolver.SubstituteAll(rt.TypeArgs()), + } + } + } + } + return typeparams.Instance{Object: sel.Obj()} +} + +func (fi *FuncInfo) callToNamedFunc(callee typeparams.Instance, deferredCall bool) { + switch o := callee.Object.(type) { + case *types.Func: + o = o.Origin() + if recv := o.Type().(*types.Signature).Recv(); recv != nil { + if _, ok := recv.Type().Underlying().(*types.Interface); ok { + // Conservatively assume that an interface implementation may be blocking. + fi.markBlocking(fi.visitorStack) + if deferredCall { + fi.deferStmts = append(fi.deferStmts, newBlockingDefer()) + } + return + } + } + + // We probably don't know yet whether the callee function is blocking. + // Record the calls site for the later stage. + paths := fi.instCallees.Get(callee) + paths = append(paths, fi.visitorStack.copy()) + fi.instCallees.Set(callee, paths) + if deferredCall { + fi.deferStmts = append(fi.deferStmts, newInstDefer(callee)) + } + case *types.Var: + // Conservatively assume that a function in a variable might be blocking. + fi.markBlocking(fi.visitorStack) + if deferredCall { + fi.deferStmts = append(fi.deferStmts, newBlockingDefer()) + } + default: + // No need to add defers for other call types, such as *types.Builtin, + // since those are considered non-blocking. + return + } +} + +func (fi *FuncInfo) markBlocking(stack astPath) { + for _, n := range stack { + fi.Blocking[n] = true + fi.Flattened[n] = true + } +} + +func (fi *FuncInfo) markFlattened(stack astPath) { + for _, n := range stack { + fi.Flattened[n] = true + } +} + +// skipParentNode is a visitor that skips the next node in the AST +// but will continue visiting the rest of the tree including the +// children of the skipped node. 
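+//
+// For example, the for-statement and range-statement cases in Visit call
+//
+//	ast.Walk(skipParentNode{then: fi}, n)
+//
+// so that the loop node n itself is not processed by fi.Visit a second time,
+// while its children are still analyzed by fi.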
+type skipParentNode struct { + then ast.Visitor +} + +func (v skipParentNode) Visit(node ast.Node) ast.Visitor { + return v.then +} diff --git a/compiler/internal/analysis/info_test.go b/compiler/internal/analysis/info_test.go new file mode 100644 index 000000000..0df26b0b9 --- /dev/null +++ b/compiler/internal/analysis/info_test.go @@ -0,0 +1,1896 @@ +package analysis + +import ( + "fmt" + "go/ast" + "go/types" + "sort" + "testing" + + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" + "github.com/gopherjs/gopherjs/internal/srctesting" +) + +func TestBlocking_Simple(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func notBlocking() { + println("hi") + }`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_Recursive(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func notBlocking(i int) { + if i > 0 { + println(i) + notBlocking(i - 1) + } + }`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_AlternatingRecursive(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func near(i int) { + if i > 0 { + println(i) + far(i) + } + } + + func far(i int) { + near(i - 1) + }`) + bt.assertNotBlocking(`near`) + bt.assertNotBlocking(`far`) +} + +func TestBlocking_Channels(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func readFromChannel(c chan bool) { + <-c + } + + func readFromChannelAssign(c chan bool) { + v := <-c + println(v) + } + + func readFromChannelAsArg(c chan bool) { + println(<-c) + } + + func sendToChannel(c chan bool) { + c <- true + } + + func rangeOnChannel(c chan bool) { + for v := range c { + println(v) + } + } + + func rangeOnSlice(c []bool) { + for v := range c { + println(v) + } + }`) + bt.assertBlocking(`readFromChannel`) + bt.assertBlocking(`sendToChannel`) + bt.assertBlocking(`rangeOnChannel`) + bt.assertBlocking(`readFromChannelAssign`) + bt.assertBlocking(`readFromChannelAsArg`) + bt.assertNotBlocking(`rangeOnSlice`) +} + +func TestBlocking_Selects(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func selectReadWithoutDefault(a, b chan bool) { + select { + case <-a: + println("a") + case v := <-b: + println("b", v) + } + } + + func selectReadWithDefault(a, b chan bool) { + select { + case <-a: + println("a") + case v := <-b: + println("b", v) + default: + println("nothing") + } + } + + func selectSendWithoutDefault(a, b chan bool) { + select { + case a <- true: + println("a") + case b <- false: + println("b") + } + } + + func selectSendWithDefault(a, b chan bool) { + select { + case a <- true: + println("a") + case b <- false: + println("b") + default: + println("nothing") + } + }`) + bt.assertBlocking(`selectReadWithoutDefault`) + bt.assertBlocking(`selectSendWithoutDefault`) + bt.assertNotBlocking(`selectReadWithDefault`) + bt.assertNotBlocking(`selectSendWithDefault`) +} + +func TestBlocking_GoRoutines_WithFuncLiterals(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func notBlocking(c chan bool) { + go func(c chan bool) { // line 4 + println(<-c) + }(c) + } + + func blocking(c chan bool) { + go func(v bool) { // line 10 + println(v) + }(<-c) + }`) + bt.assertNotBlocking(`notBlocking`) + bt.assertBlockingLit(4, ``) + + bt.assertBlocking(`blocking`) + bt.assertNotBlockingLit(10, ``) +} + +func TestBlocking_GoRoutines_WithNamedFuncs(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blockingRoutine(c chan bool) { + println(<-c) + } + + func nonBlockingRoutine(v bool) { + println(v) + } + + func notBlocking(c chan bool) { + go 
blockingRoutine(c) + } + + func blocking(c chan bool) { + go nonBlockingRoutine(<-c) + }`) + bt.assertBlocking(`blockingRoutine`) + bt.assertNotBlocking(`nonBlockingRoutine`) + + bt.assertNotBlocking(`notBlocking`) + bt.assertBlocking(`blocking`) +} + +func TestBlocking_Defers_WithoutReturns_WithFuncLiterals(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blockingBody(c chan bool) { + defer func(c chan bool) { // line 4 + println(<-c) + }(c) + } + + func blockingArg(c chan bool) { + defer func(v bool) { // line 10 + println(v) + }(<-c) + } + + func notBlocking(c chan bool) { + defer func(v bool) { // line 16 + println(v) + }(true) + }`) + bt.assertBlocking(`blockingBody`) + bt.assertBlockingLit(4, ``) + + bt.assertBlocking(`blockingArg`) + bt.assertNotBlockingLit(10, ``) + + bt.assertNotBlocking(`notBlocking`) + bt.assertNotBlockingLit(16, ``) +} + +func TestBlocking_Defers_WithoutReturns_WithNamedFuncs(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blockingPrint(c chan bool) { + println(<-c) + } + + func nonBlockingPrint(v bool) { + println(v) + } + + func blockingBody(c chan bool) { + defer blockingPrint(c) + } + + func blockingArg(c chan bool) { + defer nonBlockingPrint(<-c) + } + + func notBlocking(c chan bool) { + defer nonBlockingPrint(true) + }`) + bt.assertFuncInstCount(5) + bt.assertFuncLitCount(0) + + bt.assertBlocking(`blockingPrint`) + bt.assertNotBlocking(`nonBlockingPrint`) + + bt.assertBlocking(`blockingBody`) + bt.assertBlocking(`blockingArg`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_Defers_WithReturns_WithFuncLiterals(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blockingBody(c chan bool) int { + defer func(c chan bool) { // line 4 + println(<-c) + }(c) + return 42 + } + + func blockingArg(c chan bool) int { + defer func(v bool) { // line 11 + println(v) + }(<-c) + return 42 + } + + func notBlocking(c chan bool) int { + defer func(v bool) { // line 18 + println(v) + }(true) + return 42 + }`) + bt.assertBlocking(`blockingBody`) + bt.assertBlockingLit(4, ``) + + bt.assertBlocking(`blockingArg`) + bt.assertNotBlockingLit(11, ``) + + bt.assertNotBlocking(`notBlocking`) + bt.assertNotBlockingLit(18, ``) +} + +func TestBlocking_Defers_WithReturns_WithNamedFuncs(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blockingPrint(c chan bool) { + println(<-c) + } + + func nonBlockingPrint(v bool) { + println(v) + } + + func blockingBody(c chan bool) int { + defer blockingPrint(c) + return 42 // line 13 + } + + func blockingArg(c chan bool) int { + defer nonBlockingPrint(<-c) + return 42 // line 18 + } + + func notBlocking(c chan bool) int { + defer nonBlockingPrint(true) + return 42 // line 23 + }`) + bt.assertBlocking(`blockingPrint`) + bt.assertNotBlocking(`nonBlockingPrint`) + + bt.assertBlocking(`blockingBody`) + bt.assertBlockingReturn(13, ``) + + bt.assertBlocking(`blockingArg`) + // The defer is non-blocking so the return is not blocking + // even though the function is blocking. 
+ bt.assertNotBlockingReturn(18, ``) + + bt.assertNotBlocking(`notBlocking`) + bt.assertNotBlockingReturn(23, ``) +} + +func TestBlocking_Defers_WithMultipleReturns(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func foo(c chan int) bool { + defer func() { // line 4 + if r := recover(); r != nil { + println("Error", r) + } + }() + + if c == nil { + return false // line 11 + } + + defer func(v int) { // line 14 + println(v) + }(<-c) + + value := <-c + if value < 0 { + return false // line 20 + } + + if value > 0 { + defer func() { // line 24 + println(<-c) + }() + + return false // line 28 + } + + return true // line 31 + }`) + bt.assertBlocking(`foo`) + bt.assertNotBlockingLit(4, ``) + // Early escape from function without blocking defers is not blocking. + bt.assertNotBlockingReturn(11, ``) + bt.assertNotBlockingLit(14, ``) + // Function has had blocking by this point but no blocking defers yet. + bt.assertNotBlockingReturn(20, ``) + bt.assertBlockingLit(24, ``) + // The return is blocking because of a blocking defer. + bt.assertBlockingReturn(28, ``) + // Technically the return on line 31 is not blocking since the defer that + // is blocking can only exit through the return on line 28, but it would be + // difficult to determine which defers would only affect certain returns + // without doing full control flow analysis. + // + // TODO(grantnelson-wf): We could fix this at some point by keeping track + // of which flow control statements (e.g. if-statements) are terminating + // or not. Any defers added in a terminating control flow would not + // propagate to returns that are not in that block. + // See golang.org/x/tools/go/ssa for flow control analysis. + // + // For now we simply build up the list of defers as we go making + // the return on line 31 also blocking. + bt.assertBlockingReturn(31, ``) +} + +func TestBlocking_Defers_WithReturnsAndDefaultBlocking(t *testing.T) { + bt := newBlockingTest(t, + `package test + + type foo struct {} + func (f foo) Bar() { + println("foo") + } + + type stringer interface { + Bar() + } + + var fb = foo{}.Bar + + func deferInterfaceCall() bool { + var s stringer = foo{} + defer s.Bar() + return true // line 17 + } + + func deferVarCall() bool { + defer fb() + return true // line 22 + } + + func deferLocalVarCall() bool { + fp := foo{}.Bar + defer fp() + return true // line 28 + } + + func deferMethodExpressionCall() bool { + fp := foo.Bar + defer fp(foo{}) + return true // line 34 + } + + func deferSlicedFuncCall() bool { + s := []func() { fb, foo{}.Bar } + defer s[0]() + return true // line 40 + } + + func deferMappedFuncCall() bool { + m := map[string]func() { + "fb": fb, + "fNew": foo{}.Bar, + } + defer m["fb"]() + return true // line 49 + }`) + + bt.assertFuncInstCount(7) + bt.assertNotBlocking(`foo.Bar`) + + // None of these are actually blocking but we treat them like they are + // because the defers invoke functions via interfaces and function pointers. + bt.assertBlocking(`deferInterfaceCall`) + bt.assertBlocking(`deferVarCall`) + bt.assertBlocking(`deferLocalVarCall`) + bt.assertBlocking(`deferMethodExpressionCall`) + bt.assertBlocking(`deferSlicedFuncCall`) + bt.assertBlocking(`deferMappedFuncCall`) + + // All of these returns are blocking because they have blocking defers. 
+ bt.assertBlockingReturn(17, ``) + bt.assertBlockingReturn(22, ``) + bt.assertBlockingReturn(28, ``) + bt.assertBlockingReturn(34, ``) + bt.assertBlockingReturn(40, ``) + bt.assertBlockingReturn(49, ``) +} + +func TestBlocking_Defers_WithReturnsAndDeferBuiltin(t *testing.T) { + bt := newBlockingTest(t, + `package test + + type strSet map[string]bool + + func deferBuiltinCall() strSet { + m := strSet{ + "foo": true, + } + defer delete(m, "foo") + return m // line 10 + }`) + + bt.assertFuncInstCount(1) + bt.assertNotBlocking(`deferBuiltinCall`) + bt.assertNotBlockingReturn(10, ``) +} + +func TestBlocking_Defers_WithReturnsInLoops(t *testing.T) { + // These are example of where a defer can affect the return that + // occurs prior to the defer in the function body. + bt := newBlockingTest(t, + `package test + + func blocking(c chan int) { + println(<-c) + } + + func deferInForLoop(c chan int) bool { + i := 1000 + for { + i-- + if i <= 0 { + return true // line 12 + } + defer blocking(c) + } + } + + func deferInForLoopReturnAfter(c chan int) bool { + for i := 1000; i > 0; i-- { + defer blocking(c) + } + return true // line 22 + } + + func deferInNamedForLoop(c chan int) bool { + i := 1000 + Start: + for { + i-- + if i <= 0 { + return true // line 31 + } + defer blocking(c) + continue Start + } + } + + func deferInNamedForLoopReturnAfter(c chan int) bool { + Start: + for i := 1000; i > 0; i-- { + defer blocking(c) + continue Start + } + return true // line 44 + } + + func deferInGotoLoop(c chan int) bool { + i := 1000 + Start: + i-- + if i <= 0 { + return true // line 52 + } + defer blocking(c) + goto Start + } + + func deferInGotoLoopReturnAfter(c chan int) bool { + i := 1000 + Start: + defer blocking(c) + i-- + if i > 0 { + goto Start + } + return true // line 66 + } + + func deferInRangeLoop(c chan int) bool { + s := []int{1, 2, 3} + for i := range s { + if i > 3 { + return true // line 73 + } + defer blocking(c) + } + return false // line 77 + }`) + + bt.assertFuncInstCount(8) + bt.assertBlocking(`blocking`) + bt.assertBlocking(`deferInForLoop`) + bt.assertBlocking(`deferInForLoopReturnAfter`) + bt.assertBlocking(`deferInNamedForLoop`) + bt.assertBlocking(`deferInNamedForLoopReturnAfter`) + bt.assertBlocking(`deferInGotoLoop`) + bt.assertBlocking(`deferInGotoLoopReturnAfter`) + bt.assertBlocking(`deferInRangeLoop`) + // When the following 2 returns are defined there are no defers, however, + // because of the loop, the blocking defers defined after the return will + // block the returns. + bt.assertBlockingReturn(12, ``) + bt.assertBlockingReturn(22, ``) + bt.assertBlockingReturn(31, ``) + bt.assertBlockingReturn(44, ``) + bt.assertBlockingReturn(52, ``) + bt.assertBlockingReturn(66, ``) + bt.assertBlockingReturn(73, ``) + bt.assertBlockingReturn(77, ``) +} + +func TestBlocking_Defers_WithReturnsInLoopsInLoops(t *testing.T) { + // These are example of where a defer can affect the return that + // occurs prior to the defer in the function body. 
+ bt := newBlockingTest(t, + `package test + + func blocking(c chan int) { + println(<-c) + } + + func forLoopTheLoop(c chan int) bool { + if c == nil { + return false // line 9 + } + for i := 0; i < 10; i++ { + if i > 3 { + return true // line 13 + } + for j := 0; j < 10; j++ { + if j > 3 { + return true // line 17 + } + defer blocking(c) + if j > 2 { + return false // line 21 + } + } + if i > 2 { + return false // line 25 + } + } + return false // line 28 + } + + func rangeLoopTheLoop(c chan int) bool { + data := []int{1, 2, 3} + for i := range data { + for j := range data { + if i + j > 3 { + return true // line 36 + } + } + defer blocking(c) + } + return false // line 41 + } + + func noopThenLoop(c chan int) bool { + data := []int{1, 2, 3} + for i := range data { + if i > 13 { + return true // line 48 + } + defer func() { println("hi") }() + } + for i := range data { + if i > 3 { + return true // line 54 + } + defer blocking(c) + } + return false // line 58 + }`) + + bt.assertFuncInstCount(4) + bt.assertBlocking(`blocking`) + bt.assertBlocking(`forLoopTheLoop`) + bt.assertNotBlockingReturn(9, ``) + bt.assertBlockingReturn(13, ``) + bt.assertBlockingReturn(17, ``) + bt.assertBlockingReturn(21, ``) + bt.assertBlockingReturn(25, ``) + bt.assertBlockingReturn(28, ``) + bt.assertBlocking(`rangeLoopTheLoop`) + bt.assertBlockingReturn(36, ``) + bt.assertBlockingReturn(41, ``) + bt.assertBlocking(`noopThenLoop`) + bt.assertNotBlockingReturn(48, ``) + bt.assertBlockingReturn(54, ``) + bt.assertBlockingReturn(58, ``) +} + +func TestBlocking_Returns_WithoutDefers(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blocking(c chan bool) bool { + return <-c // line 4 + } + + func blockingBeforeReturn(c chan bool) bool { + v := <-c + return v // line 9 + } + + func indirectlyBlocking(c chan bool) bool { + return blocking(c) // line 13 + } + + func indirectlyBlockingBeforeReturn(c chan bool) bool { + v := blocking(c) + return v // line 18 + } + + func notBlocking(c chan bool) bool { + return true // line 22 + }`) + bt.assertBlocking(`blocking`) + bt.assertBlockingReturn(4, ``) + + bt.assertBlocking(`blockingBeforeReturn`) + bt.assertNotBlockingReturn(9, ``) + + bt.assertBlocking(`indirectlyBlocking`) + bt.assertBlockingReturn(13, ``) + + bt.assertBlocking(`indirectlyBlockingBeforeReturn`) + bt.assertNotBlockingReturn(18, ``) + + bt.assertNotBlocking(`notBlocking`) + bt.assertNotBlockingReturn(22, ``) +} + +func TestBlocking_Defers_WithReturnsInInstances(t *testing.T) { + // This is an example of a deferred function literal inside of + // an instance of a generic function affecting the return + // differently based on the type arguments of the instance. 
+ bt := newBlockingTest(t, + `package test + + type BazBlocker struct { + c chan bool + } + func (bb BazBlocker) Baz() { + println(<-bb.c) + } + + type BazNotBlocker struct {} + func (bnb BazNotBlocker) Baz() { + println("hi") + } + + type Foo interface { Baz() } + func FooBaz[T Foo]() bool { + defer func() { // line 17 + var foo T + foo.Baz() + }() + return true // line 21 + } + + func main() { + FooBaz[BazBlocker]() + FooBaz[BazNotBlocker]() + }`) + + bt.assertFuncInstCount(5) + bt.assertBlocking(`BazBlocker.Baz`) + bt.assertNotBlocking(`BazNotBlocker.Baz`) + bt.assertBlockingInst(`pkg/test.FooBaz`) + bt.assertNotBlockingInst(`pkg/test.FooBaz`) + bt.assertBlocking(`main`) + + bt.assertFuncLitCount(2) + bt.assertBlockingLit(17, `pkg/test.BazBlocker`) + bt.assertNotBlockingLit(17, `pkg/test.BazNotBlocker`) + + bt.assertBlockingReturn(21, `pkg/test.BazBlocker`) + bt.assertNotBlockingReturn(21, `pkg/test.BazNotBlocker`) +} + +func TestBlocking_Defers_WithReturnsAndOtherPackages(t *testing.T) { + otherSrc := `package other + + func Blocking() { + c := make(chan int) + println(<-c) + } + + func NotBlocking() { + println("Hello") + }` + + testSrc := `package test + + import "pkg/other" + + func deferOtherBlocking() bool { + defer other.Blocking() + return true // line 7 + } + + func deferOtherNotBlocking() bool { + defer other.NotBlocking() + return true // line 12 + }` + + bt := newBlockingTestWithOtherPackage(t, testSrc, otherSrc) + + bt.assertBlocking(`deferOtherBlocking`) + bt.assertBlockingReturn(7, ``) + + bt.assertNotBlocking(`deferOtherNotBlocking`) + bt.assertNotBlockingReturn(12, ``) +} + +func TestBlocking_FunctionLiteral(t *testing.T) { + // See: https://github.com/gopherjs/gopherjs/issues/955. + bt := newBlockingTest(t, + `package test + + func blocking() { + c := make(chan bool) + <-c + } + + func indirectlyBlocking() { + func() { blocking() }() // line 9 + } + + func directlyBlocking() { + func() { // line 13 + c := make(chan bool) + <-c + }() + } + + func notBlocking() { + func() { println() } () // line 20 + }`) + bt.assertBlocking(`blocking`) + + bt.assertBlocking(`indirectlyBlocking`) + bt.assertBlockingLit(9, ``) + + bt.assertBlocking(`directlyBlocking`) + bt.assertBlockingLit(13, ``) + + bt.assertNotBlocking(`notBlocking`) + bt.assertNotBlockingLit(20, ``) +} + +func TestBlocking_LinkedFunction(t *testing.T) { + bt := newBlockingTest(t, + `package test + + // linked to some other function + func blocking() + + func indirectlyBlocking() { + blocking() + }`) + bt.assertBlocking(`blocking`) + bt.assertBlocking(`indirectlyBlocking`) +} + +func TestBlocking_Instances_WithSingleTypeArg(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blocking[T any]() { + c := make(chan T) + <-c + } + + func notBlocking[T any]() { + var v T + println(v) + } + + func bInt() { + blocking[int]() + } + + func nbUint() { + notBlocking[uint]() + }`) + bt.assertFuncInstCount(4) + // blocking and notBlocking as generics do not have FuncInfo, + // only non-generic and instances have FuncInfo. 
+ + bt.assertBlockingInst(`pkg/test.blocking`) + bt.assertBlocking(`bInt`) + bt.assertNotBlockingInst(`pkg/test.notBlocking`) + bt.assertNotBlocking(`nbUint`) +} + +func TestBlocking_Instances_WithMultipleTypeArgs(t *testing.T) { + bt := newBlockingTest(t, + `package test + + func blocking[K comparable, V any, M ~map[K]V]() { + c := make(chan M) + <-c + } + + func notBlocking[K comparable, V any, M ~map[K]V]() { + var m M + println(m) + } + + func bInt() { + blocking[string, int, map[string]int]() + } + + func nbUint() { + notBlocking[string, uint, map[string]uint]() + }`) + bt.assertFuncInstCount(4) + // blocking and notBlocking as generics do not have FuncInfo, + // only non-generic and instances have FuncInfo. + + bt.assertBlockingInst(`pkg/test.blocking`) + bt.assertBlocking(`bInt`) + bt.assertNotBlockingInst(`pkg/test.notBlocking`) + bt.assertNotBlocking(`nbUint`) +} + +func TestBlocking_Indexed_FunctionSlice(t *testing.T) { + // This calls notBlocking but since the function pointers + // are in the slice they will both be considered as blocking. + bt := newBlockingTest(t, + `package test + + func blocking() { + c := make(chan int) + <-c + } + + func notBlocking() { + println() + } + + var funcs = []func() { blocking, notBlocking } + + func indexer(i int) { + funcs[i]() + }`) + bt.assertBlocking(`blocking`) + bt.assertBlocking(`indexer`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_Indexed_FunctionMap(t *testing.T) { + // This calls notBlocking but since the function pointers + // are in the map they will both be considered as blocking. + bt := newBlockingTest(t, + `package test + + func blocking() { + c := make(chan int) + <-c + } + + func notBlocking() { + println() + } + + var funcs = map[string]func() { + "b": blocking, + "nb": notBlocking, + } + + func indexer(key string) { + funcs[key]() + }`) + bt.assertBlocking(`blocking`) + bt.assertBlocking(`indexer`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_Indexed_FunctionArray(t *testing.T) { + // This calls notBlocking but since the function pointers + // are in the array they will both be considered as blocking. + bt := newBlockingTest(t, + `package test + + func blocking() { + c := make(chan int) + <-c + } + + func notBlocking() { + println() + } + + var funcs = [2]func() { blocking, notBlocking } + + func indexer(i int) { + funcs[i]() + }`) + bt.assertBlocking(`blocking`) + bt.assertBlocking(`indexer`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_Casting_InterfaceInstanceWithSingleTypeParam(t *testing.T) { + // This checks that casting to an instance type with a single type parameter + // is treated as a cast and not accidentally treated as a function call. + bt := newBlockingTest(t, + `package test + + type Foo[T any] interface { + Baz() T + } + + type Bar struct { + name string + } + + func (b Bar) Baz() string { + return b.name + } + + func caster() Foo[string] { + b := Bar{name: "foo"} + return Foo[string](b) + }`) + bt.assertNotBlocking(`caster`) +} + +func TestBlocking_Casting_InterfaceInstanceWithMultipleTypeParams(t *testing.T) { + // This checks that casting to an instance type with multiple type parameters + // is treated as a cast and not accidentally treated as a function call. 
+ bt := newBlockingTest(t, + `package test + + type Foo[K comparable, V any] interface { + Baz(K) V + } + + type Bar struct { + dat map[string]int + } + + func (b Bar) Baz(key string) int { + return b.dat[key] + } + + func caster() Foo[string, int] { + b := Bar{ dat: map[string]int{ "foo": 2 }} + return Foo[string, int](b) + }`) + bt.assertNotBlocking(`caster`) +} + +func TestBlocking_Casting_Interface(t *testing.T) { + // This checks that non-generic casting of type is treated as a + // cast and not accidentally treated as a function call. + bt := newBlockingTest(t, + `package test + + type Foo interface { + Baz() string + } + + type Bar struct { + name string + } + + func (b Bar) Baz() string { + return b.name + } + + func caster() Foo { + b := Bar{"foo"} + return Foo(b) + }`) + bt.assertNotBlocking(`caster`) +} + +func TestBlocking_ComplexCasting(t *testing.T) { + // This checks a complex casting to a type is treated as a + // cast and not accidentally treated as a function call. + bt := newBlockingTest(t, + `package test + + type Foo interface { + Bar() string + } + + func doNothing(f Foo) Foo { + return interface{ Bar() string }(f) + }`) + bt.assertNotBlocking(`doNothing`) +} + +func TestBlocking_ComplexCall(t *testing.T) { + // This checks a complex call of a function is defaulted to blocking. + bt := newBlockingTest(t, + `package test + + type Foo func() string + + func bar(f any) string { + return f.(Foo)() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_CallWithNamedInterfaceReceiver(t *testing.T) { + // This checks that calling a named interface function is defaulted to blocking. + bt := newBlockingTest(t, + `package test + + type Foo interface { + Baz() + } + + func bar(f Foo) { + f.Baz() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_CallWithUnnamedInterfaceReceiver(t *testing.T) { + // This checks that calling an unnamed interface function is defaulted to blocking. + bt := newBlockingTest(t, + `package test + + func bar(f interface { Baz() }) { + f.Baz() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_VarFunctionCall(t *testing.T) { + // This checks that calling a function in a var is defaulted to blocking. + bt := newBlockingTest(t, + `package test + + var foo = func() { // line 3 + println("hi") + } + + func bar() { + foo() + }`) + bt.assertNotBlockingLit(3, ``) + bt.assertBlocking(`bar`) +} + +func TestBlocking_FieldFunctionCallOnNamed(t *testing.T) { + // This checks that calling a function in a field is defaulted to blocking. + // This should be the same as the previous test but with a field since + // all function pointers are treated as blocking. + bt := newBlockingTest(t, + `package test + + type foo struct { + Baz func() + } + + func bar(f foo) { + f.Baz() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_FieldFunctionCallOnUnnamed(t *testing.T) { + // Same as previous test but with an unnamed struct. + bt := newBlockingTest(t, + `package test + + func bar(f struct { Baz func() }) { + f.Baz() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_ParamFunctionCall(t *testing.T) { + // Same as previous test but with an unnamed function parameter. + bt := newBlockingTest(t, + `package test + + func bar(baz func()) { + baz() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_FunctionUnwrapping(t *testing.T) { + // Test that calling a function that calls a function etc. + // is defaulted to blocking. 
+ bt := newBlockingTest(t, + `package test + + func bar(baz func()func()func()) { + baz()()() + }`) + bt.assertBlocking(`bar`) +} + +func TestBlocking_MethodCall_NonPointer(t *testing.T) { + // Test that calling a method on a non-pointer receiver. + bt := newBlockingTest(t, + `package test + + type Foo struct {} + + func (f Foo) blocking() { + ch := make(chan bool) + <-ch + } + + func (f Foo) notBlocking() { + println("hi") + } + + func blocking(f Foo) { + f.blocking() + } + + func notBlocking(f Foo) { + f.notBlocking() + }`) + bt.assertBlocking(`Foo.blocking`) + bt.assertNotBlocking(`Foo.notBlocking`) + bt.assertBlocking(`blocking`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_MethodCall_Pointer(t *testing.T) { + // Test that calling a method on a pointer receiver. + bt := newBlockingTest(t, + `package test + + type Foo struct {} + + func (f *Foo) blocking() { + ch := make(chan bool) + <-ch + } + + func (f *Foo) notBlocking() { + println("hi") + } + + func blocking(f *Foo) { + f.blocking() + } + + func notBlocking(f *Foo) { + f.notBlocking() + }`) + bt.assertBlocking(`Foo.blocking`) + bt.assertNotBlocking(`Foo.notBlocking`) + bt.assertBlocking(`blocking`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_InstantiationBlocking(t *testing.T) { + // This checks that the instantiation of a generic function is + // being used when checking for blocking not the type argument interface. + bt := newBlockingTest(t, + `package test + + type BazBlocker struct { + c chan bool + } + func (bb BazBlocker) Baz() { + println(<-bb.c) + } + + type BazNotBlocker struct {} + func (bnb BazNotBlocker) Baz() { + println("hi") + } + + type Foo interface { Baz() } + func FooBaz[T Foo](foo T) { + foo.Baz() + } + + func blockingViaExplicit() { + FooBaz[BazBlocker](BazBlocker{c: make(chan bool)}) + } + + func notBlockingViaExplicit() { + FooBaz[BazNotBlocker](BazNotBlocker{}) + } + + func blockingViaImplicit() { + FooBaz(BazBlocker{c: make(chan bool)}) + } + + func notBlockingViaImplicit() { + FooBaz(BazNotBlocker{}) + }`) + bt.assertFuncInstCount(8) + // `FooBaz` as a generic function does not have FuncInfo for it, + // only non-generic or instantiations of a generic functions have FuncInfo. + + bt.assertBlocking(`BazBlocker.Baz`) + bt.assertBlocking(`blockingViaExplicit`) + bt.assertBlocking(`blockingViaImplicit`) + bt.assertBlockingInst(`pkg/test.FooBaz`) + + bt.assertNotBlocking(`BazNotBlocker.Baz`) + bt.assertNotBlocking(`notBlockingViaExplicit`) + bt.assertNotBlocking(`notBlockingViaImplicit`) + bt.assertNotBlockingInst(`pkg/test.FooBaz`) +} + +func TestBlocking_NestedInstantiations(t *testing.T) { + // Checking that the type parameters are being propagated down into calls. 
+ bt := newBlockingTest(t, + `package test + + func Foo[T any](t T) { + println(t) + } + + func Bar[K comparable, V any, M ~map[K]V](m M) { + Foo(m) + } + + func Baz[T any, S ~[]T](s S) { + m:= map[int]T{} + for i, v := range s { + m[i] = v + } + Bar(m) + } + + func bazInt() { + Baz([]int{1, 2, 3}) + } + + func bazString() { + Baz([]string{"one", "two", "three"}) + }`) + bt.assertFuncInstCount(8) + bt.assertNotBlocking(`bazInt`) + bt.assertNotBlocking(`bazString`) + bt.assertNotBlockingInst(`pkg/test.Foo`) + bt.assertNotBlockingInst(`pkg/test.Foo`) + bt.assertNotBlockingInst(`pkg/test.Bar`) + bt.assertNotBlockingInst(`pkg/test.Bar`) + bt.assertNotBlockingInst(`pkg/test.Baz`) + bt.assertNotBlockingInst(`pkg/test.Baz`) +} + +func TestBlocking_UnusedGenericFunctions(t *testing.T) { + // Checking that the type parameters are being propagated down into callee. + // This is based off of go1.19.13/test/typeparam/orderedmap.go + bt := newBlockingTest(t, + `package test + + type node[K, V any] struct { + key K + val V + left, right *node[K, V] + } + + type Tree[K, V any] struct { + root *node[K, V] + eq func(K, K) bool + } + + func New[K, V any](eq func(K, K) bool) *Tree[K, V] { + return &Tree[K, V]{eq: eq} + } + + func NewStrKey[K ~string, V any]() *Tree[K, V] { // unused + return New[K, V](func(k1, k2 K) bool { + return string(k1) == string(k2) + }) + } + + func NewStrStr[V any]() *Tree[string, V] { // unused + return NewStrKey[string, V]() + } + + func main() { + t := New[int, string](func(k1, k2 int) bool { + return k1 == k2 + }) + println(t) + }`) + bt.assertFuncInstCount(2) + // Notice that `NewStrKey` and `NewStrStr` are not called so doesn't have + // any known instances and therefore they don't have any FuncInfos. + bt.assertNotBlockingInst(`pkg/test.New`) + bt.assertNotBlocking(`main`) +} + +func TestBlocking_LitInstanceCalls(t *testing.T) { + // Literals defined inside a generic function must inherit the + // type arguments (resolver) of the enclosing instance it is defined in + // so that things like calls to other generic functions create the + // call to the correct concrete instance. + bt := newBlockingTest(t, + `package test + + func foo[T any](x T) { + println(x) + } + + func bar[T any](x T) { + f := func(v T) { // line 8 + foo[T](v) + } + f(x) + } + + func main() { + bar[int](42) + bar[float64](3.14) + }`) + bt.assertFuncInstCount(5) + + bt.assertNotBlockingInst(`pkg/test.foo`) + bt.assertNotBlockingInst(`pkg/test.foo`) + bt.assertNotBlockingLit(8, `int`) + bt.assertNotBlockingLit(8, `float64`) + // The following are blocking because the function literal call. + bt.assertBlockingInst(`pkg/test.bar`) + bt.assertBlockingInst(`pkg/test.bar`) +} + +func TestBlocking_BlockingLitInstance(t *testing.T) { + bt := newBlockingTest(t, + `package test + + type BazBlocker struct { + c chan bool + } + func (bb BazBlocker) Baz() { + println(<-bb.c) + } + + type BazNotBlocker struct {} + func (bnb BazNotBlocker) Baz() { + println("hi") + } + + type Foo interface { Baz() } + func FooBaz[T Foo](foo T) func() { + return func() { // line 17 + foo.Baz() + } + } + + func main() { + _ = FooBaz(BazBlocker{}) + _ = FooBaz(BazNotBlocker{}) + }`) + bt.assertFuncInstCount(5) + + bt.assertBlocking(`BazBlocker.Baz`) + // THe following is not blocking because the function literal is not called. 
+ bt.assertNotBlockingInst(`pkg/test.FooBaz`) + bt.assertBlockingLit(17, `pkg/test.BazBlocker`) + + bt.assertNotBlocking(`BazNotBlocker.Baz`) + bt.assertNotBlockingInst(`pkg/test.FooBaz`) + bt.assertNotBlockingLit(17, `pkg/test.BazNotBlocker`) +} + +func TestBlocking_MethodSelection(t *testing.T) { + // This tests method selection using method expression (receiver as the first + // argument) selecting on type and method call selecting on a variable. + // This tests in both generic (FooBaz[T]) and non-generic contexts. + bt := newBlockingTest(t, + `package test + + type Foo interface { Baz() } + + type BazBlocker struct { + c chan bool + } + func (bb BazBlocker) Baz() { + println(<-bb.c) + } + + type BazNotBlocker struct {} + func (bnb BazNotBlocker) Baz() { + println("hi") + } + + type FooBaz[T Foo] struct {} + func (fb FooBaz[T]) ByMethodExpression() { + var foo T + T.Baz(foo) + } + func (fb FooBaz[T]) ByInstance() { + var foo T + foo.Baz() + } + + func blocking() { + fb := FooBaz[BazBlocker]{} + + FooBaz[BazBlocker].ByMethodExpression(fb) + FooBaz[BazBlocker].ByInstance(fb) + + fb.ByMethodExpression() + fb.ByInstance() + } + + func notBlocking() { + fb := FooBaz[BazNotBlocker]{} + + FooBaz[BazNotBlocker].ByMethodExpression(fb) + FooBaz[BazNotBlocker].ByInstance(fb) + + fb.ByMethodExpression() + fb.ByInstance() + }`) + bt.assertFuncInstCount(8) + + bt.assertBlocking(`BazBlocker.Baz`) + bt.assertBlockingInst(`pkg/test.FooBaz.ByMethodExpression`) + bt.assertBlockingInst(`pkg/test.FooBaz.ByInstance`) + bt.assertBlocking(`blocking`) + + bt.assertNotBlocking(`BazNotBlocker.Baz`) + bt.assertNotBlockingInst(`pkg/test.FooBaz.ByMethodExpression`) + bt.assertNotBlockingInst(`pkg/test.FooBaz.ByInstance`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_IsImportBlocking_Simple(t *testing.T) { + otherSrc := `package other + + func Blocking() { + ch := make(chan bool) + <-ch + } + + func NotBlocking() { + println("hi") + }` + + testSrc := `package test + + import "pkg/other" + + func blocking() { + other.Blocking() + } + + func notBlocking() { + other.NotBlocking() + }` + + bt := newBlockingTestWithOtherPackage(t, testSrc, otherSrc) + bt.assertBlocking(`blocking`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_IsImportBlocking_ForwardInstances(t *testing.T) { + otherSrc := `package other + + type BazBlocker struct { + c chan bool + } + func (bb BazBlocker) Baz() { + println(<-bb.c) + } + + type BazNotBlocker struct {} + func (bnb BazNotBlocker) Baz() { + println("hi") + }` + + testSrc := `package test + + import "pkg/other" + + type Foo interface { Baz() } + func FooBaz[T Foo](f T) { + f.Baz() + } + + func blocking() { + FooBaz(other.BazBlocker{}) + } + + func notBlocking() { + FooBaz(other.BazNotBlocker{}) + }` + + bt := newBlockingTestWithOtherPackage(t, testSrc, otherSrc) + bt.assertBlocking(`blocking`) + bt.assertNotBlocking(`notBlocking`) +} + +func TestBlocking_IsImportBlocking_BackwardInstances(t *testing.T) { + // This tests propagation of information across package boundaries. + // `FooBaz` has no instances in it until it is referenced in the `test` package. + // That instance information needs to propagate back across the package + // boundary to the `other` package. The information for `BazBlocker` and + // `BazNotBlocker` is propagated back to `FooBaz[BazBlocker]` and + // `FooBaz[BazNotBlocker]`. That information is then propagated forward + // to the `blocking` and `notBlocking` functions in the `test` package. 
+ + otherSrc := `package other + + type Foo interface { Baz() } + func FooBaz[T Foo](f T) { + f.Baz() + }` + + testSrc := `package test + + import "pkg/other" + + type BazBlocker struct { + c chan bool + } + func (bb BazBlocker) Baz() { + println(<-bb.c) + } + + type BazNotBlocker struct {} + func (bnb BazNotBlocker) Baz() { + println("hi") + } + + func blocking() { + other.FooBaz(BazBlocker{}) + } + + func notBlocking() { + other.FooBaz(BazNotBlocker{}) + }` + + bt := newBlockingTestWithOtherPackage(t, testSrc, otherSrc) + bt.assertBlocking(`blocking`) + bt.assertNotBlocking(`notBlocking`) +} + +type blockingTest struct { + f *srctesting.Fixture + file *ast.File + pkgInfo *Info +} + +func newBlockingTest(t *testing.T, src string) *blockingTest { + f := srctesting.New(t) + tContext := types.NewContext() + tc := typeparams.Collector{ + TContext: tContext, + Info: f.Info, + Instances: &typeparams.PackageInstanceSets{}, + } + + file := f.Parse(`test.go`, src) + testInfo, testPkg := f.Check(`pkg/test`, file) + tc.Scan(testPkg, file) + + getImportInfo := func(path string) (*Info, error) { + return nil, fmt.Errorf(`getImportInfo should not be called in this test, called with %v`, path) + } + pkgInfo := AnalyzePkg([]*ast.File{file}, f.FileSet, testInfo, tContext, testPkg, tc.Instances, getImportInfo) + PropagateAnalysis([]*Info{pkgInfo}) + + return &blockingTest{ + f: f, + file: file, + pkgInfo: pkgInfo, + } +} + +func newBlockingTestWithOtherPackage(t *testing.T, testSrc string, otherSrc string) *blockingTest { + f := srctesting.New(t) + tContext := types.NewContext() + tc := typeparams.Collector{ + TContext: tContext, + Info: f.Info, + Instances: &typeparams.PackageInstanceSets{}, + } + + pkgInfo := map[string]*Info{} + getImportInfo := func(path string) (*Info, error) { + if info, ok := pkgInfo[path]; ok { + return info, nil + } + return nil, fmt.Errorf(`unexpected package in getImportInfo for %v`, path) + } + + otherFile := f.Parse(`other.go`, otherSrc) + _, otherPkg := f.Check(`pkg/other`, otherFile) + tc.Scan(otherPkg, otherFile) + + testFile := f.Parse(`test.go`, testSrc) + _, testPkg := f.Check(`pkg/test`, testFile) + tc.Scan(testPkg, testFile) + + otherPkgInfo := AnalyzePkg([]*ast.File{otherFile}, f.FileSet, f.Info, tContext, otherPkg, tc.Instances, getImportInfo) + pkgInfo[otherPkg.Path()] = otherPkgInfo + + testPkgInfo := AnalyzePkg([]*ast.File{testFile}, f.FileSet, f.Info, tContext, testPkg, tc.Instances, getImportInfo) + pkgInfo[testPkg.Path()] = testPkgInfo + + PropagateAnalysis([]*Info{otherPkgInfo, testPkgInfo}) + + return &blockingTest{ + f: f, + file: testFile, + pkgInfo: testPkgInfo, + } +} + +func (bt *blockingTest) assertFuncInstCount(expCount int) { + bt.f.T.Helper() + if got := bt.pkgInfo.funcInstInfos.Len(); got != expCount { + bt.f.T.Errorf(`Got %d function instance infos but expected %d.`, got, expCount) + for i, inst := range bt.pkgInfo.funcInstInfos.Keys() { + bt.f.T.Logf(` %d. 
%q`, i+1, inst.String()) + } + } +} + +func (bt *blockingTest) assertFuncLitCount(expCount int) { + bt.f.T.Helper() + got := 0 + for _, fis := range bt.pkgInfo.funcLitInfos { + got += len(fis) + } + if got != expCount { + bt.f.T.Errorf(`Got %d function literal infos but expected %d.`, got, expCount) + + lits := make([]string, 0, len(bt.pkgInfo.funcLitInfos)) + for fl, fis := range bt.pkgInfo.funcLitInfos { + pos := bt.f.FileSet.Position(fl.Pos()).String() + for _, fi := range fis { + lits = append(lits, pos+`<`+fi.typeArgs.String()+`>`) + } + } + sort.Strings(lits) + for i := range lits { + bt.f.T.Logf(` %d. %q`, i+1, lits[i]) + } + } +} + +func (bt *blockingTest) assertBlocking(funcName string) { + bt.f.T.Helper() + if !bt.isTypesFuncBlocking(funcName) { + bt.f.T.Errorf(`Got %q as not blocking but expected it to be blocking.`, funcName) + } +} + +func (bt *blockingTest) assertNotBlocking(funcName string) { + bt.f.T.Helper() + if bt.isTypesFuncBlocking(funcName) { + bt.f.T.Errorf(`Got %q as blocking but expected it to be not blocking.`, funcName) + } +} + +func getFuncDeclName(fd *ast.FuncDecl) string { + name := fd.Name.Name + if fd.Recv != nil && len(fd.Recv.List) == 1 && fd.Recv.List[0].Type != nil { + typ := fd.Recv.List[0].Type + if p, ok := typ.(*ast.StarExpr); ok { + typ = p.X + } + if id, ok := typ.(*ast.Ident); ok { + name = id.Name + `.` + name + } + } + return name +} + +func (bt *blockingTest) isTypesFuncBlocking(funcName string) bool { + bt.f.T.Helper() + var decl *ast.FuncDecl + ast.Inspect(bt.file, func(n ast.Node) bool { + if f, ok := n.(*ast.FuncDecl); ok && getFuncDeclName(f) == funcName { + decl = f + return false + } + return decl == nil + }) + + if decl == nil { + bt.f.T.Fatalf(`Declaration of %q is not found in the AST.`, funcName) + } + + blockingType, ok := bt.pkgInfo.Defs[decl.Name] + if !ok { + bt.f.T.Fatalf(`No function declaration found for %q.`, decl.Name) + } + + inst := typeparams.Instance{Object: blockingType.(*types.Func)} + return bt.pkgInfo.IsBlocking(inst) +} + +func (bt *blockingTest) assertBlockingLit(lineNo int, typeArgsStr string) { + bt.f.T.Helper() + if !bt.isFuncLitBlocking(lineNo, typeArgsStr) { + bt.f.T.Errorf(`Got FuncLit at line %d with type args %q as not blocking but expected it to be blocking.`, lineNo, typeArgsStr) + } +} + +func (bt *blockingTest) assertNotBlockingLit(lineNo int, typeArgsStr string) { + bt.f.T.Helper() + if bt.isFuncLitBlocking(lineNo, typeArgsStr) { + bt.f.T.Errorf(`Got FuncLit at line %d with type args %q as blocking but expected it to be not blocking.`, lineNo, typeArgsStr) + } +} + +func (bt *blockingTest) isFuncLitBlocking(lineNo int, typeArgsStr string) bool { + bt.f.T.Helper() + fnLit := srctesting.GetNodeAtLineNo[*ast.FuncLit](bt.file, bt.f.FileSet, lineNo) + if fnLit == nil { + bt.f.T.Fatalf(`FuncLit on line %d not found in the AST.`, lineNo) + } + + fis, ok := bt.pkgInfo.funcLitInfos[fnLit] + if !ok { + bt.f.T.Fatalf(`No FuncInfo found for FuncLit at line %d.`, lineNo) + } + + for _, fi := range fis { + if fi.typeArgs.String() == typeArgsStr { + return fi.IsBlocking() + } + } + + bt.f.T.Logf("FuncList instances:") + for i, fi := range fis { + bt.f.T.Logf("\t%d. 
%q\n", i+1, fi.typeArgs.String()) + } + bt.f.T.Fatalf(`No FuncInfo found for FuncLit at line %d with type args %q.`, lineNo, typeArgsStr) + return false +} + +func (bt *blockingTest) assertBlockingInst(instanceStr string) { + bt.f.T.Helper() + if !bt.isFuncInstBlocking(instanceStr) { + bt.f.T.Errorf(`Got function instance of %q as not blocking but expected it to be blocking.`, instanceStr) + } +} + +func (bt *blockingTest) assertNotBlockingInst(instanceStr string) { + bt.f.T.Helper() + if bt.isFuncInstBlocking(instanceStr) { + bt.f.T.Errorf(`Got function instance of %q as blocking but expected it to be not blocking.`, instanceStr) + } +} + +func (bt *blockingTest) isFuncInstBlocking(instanceStr string) bool { + bt.f.T.Helper() + instances := bt.pkgInfo.funcInstInfos.Keys() + for _, inst := range instances { + if inst.String() == instanceStr { + return bt.pkgInfo.FuncInfo(inst).IsBlocking() + } + } + bt.f.T.Logf(`Function instances found in package info:`) + for i, inst := range instances { + bt.f.T.Logf("\t%d. %s", i+1, inst.String()) + } + bt.f.T.Fatalf(`No function instance found for %q in package info.`, instanceStr) + return false +} + +func (bt *blockingTest) assertBlockingReturn(lineNo int, typeArgsStr string) { + bt.f.T.Helper() + if !bt.isReturnBlocking(lineNo, typeArgsStr) { + bt.f.T.Errorf(`Got return at line %d (%q) as not blocking but expected it to be blocking.`, lineNo, typeArgsStr) + } +} + +func (bt *blockingTest) assertNotBlockingReturn(lineNo int, typeArgsStr string) { + bt.f.T.Helper() + if bt.isReturnBlocking(lineNo, typeArgsStr) { + bt.f.T.Errorf(`Got return at line %d (%q) as blocking but expected it to be not blocking.`, lineNo, typeArgsStr) + } +} + +func (bt *blockingTest) isReturnBlocking(lineNo int, typeArgsStr string) bool { + bt.f.T.Helper() + ret := srctesting.GetNodeAtLineNo[*ast.ReturnStmt](bt.file, bt.f.FileSet, lineNo) + if ret == nil { + bt.f.T.Fatalf(`ReturnStmt on line %d not found in the AST.`, lineNo) + } + + foundInfo := []*FuncInfo{} + for _, info := range bt.pkgInfo.allInfos { + for _, rs := range info.returnStmts { + if rs.analyzeStack[len(rs.analyzeStack)-1] == ret { + if info.typeArgs.String() == typeArgsStr { + // Found info that matches the type args and + // has the return statement so return the blocking value. + return info.Blocking[ret] + } + + // Wrong instance, record for error message in the case + // that the correct one instance is not found. + foundInfo = append(foundInfo, info) + break + } + } + } + + bt.f.T.Logf("FuncInfo instances with ReturnStmt at line %d:", lineNo) + for i, info := range foundInfo { + bt.f.T.Logf("\t%d. %q\n", i+1, info.typeArgs.String()) + } + bt.f.T.Fatalf(`No FuncInfo found for ReturnStmt at line %d with type args %q.`, lineNo, typeArgsStr) + return false +} diff --git a/compiler/internal/analysis/return.go b/compiler/internal/analysis/return.go new file mode 100644 index 000000000..3c83b3c1f --- /dev/null +++ b/compiler/internal/analysis/return.go @@ -0,0 +1,21 @@ +package analysis + +// returnStmt represents a return statement that is blocking or not. +type returnStmt struct { + analyzeStack astPath + deferStmts []*deferStmt +} + +func newReturnStmt(stack astPath, deferStmts []*deferStmt) returnStmt { + return returnStmt{ + analyzeStack: stack.copy(), + deferStmts: deferStmts, + } +} + +// IsBlocking determines if the return statement is blocking or not +// based on the defer statements that affect the return. +// The return may still be blocking if the function has labels and goto's. 
+func (r returnStmt) IsBlocking(info *FuncInfo) bool {
+	return isAnyDeferBlocking(r.deferStmts, info.pkgInfo)
+}
diff --git a/compiler/analysis/sideeffect.go b/compiler/internal/analysis/sideeffect.go
similarity index 100%
rename from compiler/analysis/sideeffect.go
rename to compiler/internal/analysis/sideeffect.go
diff --git a/compiler/internal/dce/README.md b/compiler/internal/dce/README.md
new file mode 100644
index 000000000..01ec1e8c6
--- /dev/null
+++ b/compiler/internal/dce/README.md
@@ -0,0 +1,625 @@
+# Dead-Code Elimination
+
+Dead-Code Elimination (DCE) is used to remove code that isn't
+reachable from a code entry point. Entry points are code like the main method,
+init functions, and variable initializations with side effects.
+These entry points are always considered alive. Any dependency of
+something alive is also considered alive.
+
+Once all dependencies are taken into consideration, we have the set of alive
+declarations. Anything not considered alive is considered dead and
+may be safely eliminated, i.e. not output to JS.
+
+- [Idea](#idea)
+  - [Package](#package)
+  - [Named Types](#named-types)
+    - [Named Structs](#named-structs)
+  - [Interfaces](#interfaces)
+  - [Functions](#functions)
+  - [Variables](#variables)
+  - [Generics and Instances](#generics-and-instances)
+  - [Links](#links)
+- [Design](#design)
+  - [Initially Alive](#initially-alive)
+  - [Naming](#naming)
+    - [Name Specifics](#name-specifics)
+  - [Dependencies](#dependencies)
+- [Examples](#examples)
+  - [Dead Package](#dead-package)
+  - [Grandmas and Zombies](#grandmas-and-zombies)
+  - [Side Effects](#side-effects)
+  - [Instance Duck-typing](#instance-duck-typing)
+- [Additional Notes](#additional-notes)
+
+## Idea
+
+The following is the logic behind the DCE mechanism. Not all of the following
+is used since some conditions are difficult to determine even with a lot of
+additional information, and because GopherJS stores some additional information
+making some parts of DCE unnecessary. To ensure that the JS output is fully
+functional, we bias the DCE towards things being alive. We'd rather keep
+something we don't need than remove something that is needed.
+
+### Package
+
+Package declarations (e.g. `package foo`) might be able to be removed
+when only used by dead code. However, packages may be imported and not used
+for various reasons, including to invoke some initialization or to implement
+a link. So it is difficult to determine.
+See the [Dead Package](#dead-package) example.
+
+Currently, we won't remove any packages, but someday the complexity
+could be added to check for inits, side effects, links, etc., then determine
+if any of those are alive or affect alive things.
+
+### Named Types
+
+Named type definitions (e.g. `type Foo int`) depend on
+the underlying type for each definition.
+
+When a named type is alive, all of its exported methods
+(e.g. `func (f Foo) Bar() { }`) are also alive, even any unused exported method.
+Unused exported methods are still important when duck-typing.
+See [Interfaces](#interfaces) for more information.
+See [Grandmas and Zombies](#grandmas-and-zombies) for an example of what
+can happen when removing an unused exported method.
+
+Also, unused exported methods could be accessed by name via reflect
+(e.g. `reflect.ValueOf(&Foo{}).MethodByName("Bar")`). Since the
+string name may be provided from outside the code, such as the command line,
+it is impossible to determine which exported methods could be accessed this way.
+It would be very difficult to determine which types are ever accessed via
+reflect, so by default we simply assume any can be.
+
+Methods that are unexported may be considered dead when unused even when
+the receiver type is alive. The exception is when an interface in the same
+package has the same unexported method in it.
+See [Interfaces](#interfaces) for more information.
+
+#### Named Structs
+
+A named struct is a named type that has a struct as its underlying type,
+e.g. `type Foo struct { }`. A struct type depends on all of the types in
+its fields and embedded fields.
+
+If the struct type is alive, then all the types of the fields will also be alive.
+Even unexported fields may be accessed via reflection, so they all must be
+alive. Also, the fields are needed for comparisons and serializations
+(such as `encoding/binary`).
+
+### Interfaces
+
+All the types in the function signatures and embedded interfaces are
+dependencies of the interface.
+
+Interfaces may contain exported and unexported function signatures.
+If an interface is alive, then all of its functions are alive.
+Since there are many ways to wrap a type with an interface, any alive type that
+duck-types to an interface must have all of the matching methods also alive.
+
+In theory the unexported functions are also alive; however, for GopherJS there
+is an exception because duck-typing is handled separately from the method
+definitions. Those differences are discussed in [Dependencies](#dependencies),
+but for this idea we discuss DCE more generally.
+
+Since the exported methods in an alive type will be alive, see
+[Named Types](#named-types), the only ones here that need to be considered
+are the unexported methods. An interface with unexported methods may only
+duck-type to types within the package the interface is defined in.
+Therefore, if an interface with unexported methods is alive, then all
+alive types within the same package that duck-type to that interface
+will have the matching unexported methods be alive.
+
+Since doing a full `types.Implements` check between every named type and
+interface in a package is difficult, we simplify this requirement to be
+any unexported method in an alive named type that matches an unexported
+method in an alive interface is alive even if the named type doesn't duck-type
+to the interface. This means that in some rare cases, some unexported
+methods on named structs that could have been eliminated will not be.
+For example, given `type Foo struct{}; func (f Foo) X(); func (f Foo) y()` the
+`Foo.y()` method may be alive if `type Bar interface { Z(); y() }` is alive,
+even though the `X()` and `Z()` mean that `Foo` doesn't implement `Bar`
+and therefore `Foo.y()` cannot be called via `Bar.y()`.
+
+We will try to reduce the false positives in alive unexported methods by using
+the parameter and result types of the methods. Meaning that
+`y()`, `y(int)`, `y() int`, etc. won't match just because they are named `y`.
+This also helps with a generic type's unexported methods that use
+type parameters, e.g. `Foo.y(T)`. Since the generic type may be instantiated
+with `int` and `string`, the different instances of the method are `Foo.y(int)`
+and `Foo.y(string)`. By using the parameter and result types, it is possible
+to remove the unused unexported method instantiations even when some
+instantiations of the same method are used.
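+
+To make the last point concrete, here is a minimal, hypothetical sketch (the
+names below are illustrative only, not taken from GopherJS itself). The
+unexported method `values` is instantiated twice with different signatures,
+and only the instantiation whose signature an alive interface actually uses
+needs to be kept:
+
+```go
+package main
+
+// bag is a generic type whose unexported method values() is instantiated
+// once per realized type argument: values() []int and values() []string.
+type bag[T any] map[string]T
+
+func (b bag[T]) values() []T {
+	out := make([]T, 0, len(b))
+	for _, v := range b {
+		out = append(out, v)
+	}
+	return out
+}
+
+// intSource only matches the values() []int signature, so an alive intSource
+// keeps bag[int].values alive but says nothing about bag[string].values.
+type intSource interface{ values() []int }
+
+func sum(src intSource) int {
+	total := 0
+	for _, v := range src.values() {
+		total += v
+	}
+	return total
+}
+
+func main() {
+	ints := bag[int]{"a": 1, "b": 2}
+	println(sum(ints)) // bag[int].values must stay alive.
+
+	strs := bag[string]{"x": "y"}
+	_ = strs // Nothing alive needs values() []string, so bag[string].values may be dropped.
+}
+```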
+
+### Functions
+
+Functions with or without a receiver are dependent on the types used by the
+parameters, results, and type uses inside the body of the function.
+They are also dependent on any function invoked or used, and
+any package level variable that is used.
+
+Unused functions without a receiver, whether exported or not, may be
+considered dead since they aren't used in duck-typing and cannot be accessed
+by name via reflection.
+
+### Variables
+
+Variables (or constants) depend on their type and anything used during
+initialization.
+
+Exported and unexported variables are dead unless they are used by something
+else that is alive or their initialization has side effects.
+
+If the initialization has side effects, the variable will be alive even
+if unused. The side effect may be simply setting another variable's value
+that is also unused; however, it would be difficult to determine if the
+side effects are used or not.
+See the [Side Effects](#side-effects) example.
+
+### Generics and Instances
+
+For functions and types with generics, the definitions are split into
+unique instances. For example, `type StringKeys[T any] map[string]T`
+could be used in code as `StringKeys[int]` and `StringKeys[*Cat]`.
+We don't need all possible instances, only the ones which are realized
+in code. Each instance depends on the realized parameter types (type arguments).
+In the example the type arguments are `int` and `*Cat`.
+
+The instance of the generic type also defines the code with the specific
+type arguments (e.g. `map[string]int` and `map[string]*Cat`). When an
+instance is depended on by alive code, only that instance is alive, not the
+entire generic type. This means if `StringKeys[*Cat]` is only used from dead
+code then it is also dead and can be safely eliminated.
+
+The named generic types may have methods that are also copied for an instance
+with the parameter types replaced by the type arguments. For example,
+`func (sk StringKeys[T]) values() []T { ... }` becomes
+`func (sk StringKeys[int]) values() []int { ... }` when the type argument
+is `int`. This method in the instance now duck-types to
+`interface { values() []int }` and therefore must follow the rules for
+unexported methods.
+See the [Instance Duck-typing](#instance-duck-typing) example for more information.
+
+Functions and named types may be generic, but methods and unnamed types
+may not be. This makes some things simpler. When a method with a receiver is used,
+only the receiver's type arguments are needed. The generic type or function
+may not be needed since only the instances are written out.
+
+This also means that inside of a generic function or named type there is only
+one type parameter list being used. Even generic types used inside of the
+generic function must be specified in terms of the type parameters for the
+generic and don't contribute any type parameters of their own.
+For example, inside of `func Foo[K comparable, V any]() { ... }` every
+usage of a generic type must specify a concrete type (`int`, `*Cat`,
+`Bar[Bar[bool]]`) or use the type parameters `K` and `V`. This is simpler
+than languages that allow a method of an object to have its own type
+parameters, e.g. `class X<T> { void Y<U>() { ... } ... }`.
+
+However, generics mean that the same method, receiver, type, etc. names
+will be used with different parameter types caused by different type
+arguments. The type arguments are being passed into those parameter types
+for a specific instance.
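+
+As a small, hypothetical sketch of the instance-level reasoning above (the
+names are illustrative only), the generic declaration itself is never emitted;
+only its realized instances are candidates for output, and an instance that is
+reachable only from dead code is dead as well:
+
+```go
+package main
+
+// keysOf is generic; only the instances realized below matter for DCE,
+// never the generic declaration itself.
+func keysOf[K comparable, V any](m map[K]V) []K {
+	out := make([]K, 0, len(m))
+	for k := range m {
+		out = append(out, k)
+	}
+	return out
+}
+
+// used is reachable from main, so the keysOf[string, int] instance it
+// realizes is alive.
+func used() {
+	println(len(keysOf(map[string]int{"a": 1})))
+}
+
+// unused is not reachable from any entry point, so both unused and the
+// keysOf[int, bool] instance it realizes are dead and may be eliminated.
+func unused() {
+	println(len(keysOf(map[int]bool{1: true})))
+}
+
+func main() {
+	used()
+}
+```
+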
+When an interface is alive, the signatures for unexported methods
+need to be instantiated with type arguments so that we know which instances
+the interface is duck-typing to. See [Interfaces](#interfaces) for more detail.
+
+### Links
+
+Links use compiler directives
+([`//go:linkname`](https://pkg.go.dev/cmd/compile#hdr-Compiler_Directives))
+to alias a `var` or `func` with another.
+For example, some code may have `func bar_foo()` as a function stub that is
+linked with `foo() { ... }` as a function with a body, i.e. the target of the
+link. The links are single directional but allow multiple stubs to link to the
+same target.
+
+When a link is made, the dependencies for the linked code come from
+the target. If the target is used by something alive then it is alive.
+If a stub linked to a target is used by something alive then that stub and
+the target are both alive.
+
+Since links cross package boundaries in ways that may violate encapsulation
+and the dependency tree, it may be difficult to determine if a link is alive
+or not. Therefore, currently all links are considered alive.
+
+## Design
+
+The design is created by taking all the parts of the above idea together and
+simplifying the justifications down to a simple set of rules.
+
+### Initially Alive
+
+- The `main` method in the `main` package
+- The `init` in every included file
+- Any variable initialization that has a side effect
+- Any linked function or variable
+- Anything not given a DCE name, e.g. packages
+
+### Naming
+
+The following specifies what declarations should be named and how
+the names should look. These names are later used to match (via string
+comparisons) dependencies with declarations that should be set as alive.
+Since the names are used to filter out alive code from all the code,
+these names may also be referred to as filters.
+
+Some names will have multiple name parts: an object name and a method name.
+This is kind of like a first name and last name when a first name alone isn't
+specific enough. This helps with matching multiple dependency requirements
+for a declaration, i.e. both name parts must be alive before the declaration
+is considered alive.
+
+Currently, only unexported method declarations will have a method
+name to support duck-typing with unexported signatures on interfaces.
+If the unexported method is depended on, then both names will be in
+the dependencies. If the receiver is alive and an alive interface has the
+matching unexported signature, then both names will be depended on, thus making
+the unexported method alive. Since the unexported method is only visible in
+the package in which it is defined, the package path is included in the
+method name.
+
+To simplify the above for GopherJS, we don't look at the receiver for
+an unexported method before indicating it is alive. This means that if there is no
+interface, only two named objects with identical unexported methods, the use
+of either will indicate a use of both. This will cause slightly more unexported
+methods to be alive while reducing the complication of type checking which object
+or type of object is performing the call.
+
+| Declaration | exported | unexported | non-generic | generic | object name | method name |
+|:------------|:--------:|:----------:|:-----------:|:-------:|:------------|:------------|
+| variables   | █ | █ | █ | n/a | `<package>.<var name>` | |
+| functions   | █ | █ | █ |     | `<package>.<func name>` | |
+| functions   | █ | █ |   | █   | `<package>.<func name>[<type args>]` | |
+| named type  | █ | █ | █ |     | `<package>.<type name>` | |
+| named type  | █ | █ |   | █   | `<package>.<type name>[<type args>]` | |
+| method      | █ |   | █ |     | `<package>.<receiver name>` | |
+| method      | █ |   |   | █   | `<package>.<receiver name>[<type args>]` | |
+| method      |   | █ | █ |     | `<package>.<receiver name>` | `<package>.<method name>(<param types>)(<result types>)` |
+| method      |   | █ |   | █   | `<package>.<receiver name>[<type args>]` | `<package>.<method name>(<param types>)(<result types>)` |
+
+#### Name Specifics
+
+The following are specifics about the different types of names that show
+up in the above table. This isn't the only way to represent this information.
+These names can get long but don't have to. The goal is to make the names
+as unique as possible while still ensuring that signatures in
+interfaces will match the correct methods. The less unique the names are,
+the more false positives for alive will occur, meaning more dead code is
+kept alive. However, names that are too unique could cause needed alive code
+to not match and be eliminated, causing the application to not run.
+
+`<package>.<var name>`, `<package>.<func name>`, `<package>.<type name>`,
+and `<package>.<receiver name>` all have the same form. They are
+the package path followed by a `.`, if there is a package path,
+and the object name or receiver name.
+For example [`rand.Shuffle`](https://pkg.go.dev/math/rand@go1.23.1#Shuffle)
+will be named `math/rand.Shuffle`. The builtin [`error`](https://pkg.go.dev/builtin@go1.23.1#error)
+will be named `error` without a package path.
+
+`<package>.<func name>[<type args>]`, `<package>.<type name>[<type args>]`,
+and `<package>.<receiver name>[<type args>]` are the same as above
+except with comma-separated type parameters or type arguments in square brackets.
+The type parameter names are not used; instead the constraint types are, since
+the names for type parameters may not match even if the constraints match.
+For example `type Foo[T any] struct{}; type Bar[B any] struct { f Foo[B] }`
+has `Foo[B]` used in `Bar` that is identical to `Foo[T]` even though
+technically `Foo[B]` is an instance of `Foo[T]` with the `B` type parameter
+as the type argument.
+
+Command compiles, i.e. compiles with a `main` entry point, and test builds
+should not have any type parameters that aren't resolved to concrete types;
+however, to handle partial compiles of packages, there may still
+be a type parameter, including unions of approximate constraints,
+e.g. `~int|~string`.
+
+Therefore, type arguments need to be reduced to only types. This means
+something like [`maps.Keys`](https://pkg.go.dev/maps@go1.23.1#Keys), i.e.
+`func Keys[Map ~map[K]V, K comparable, V any](m Map) iter.Seq[K]`,
+will be named `maps.Keys[~map[comparable]any, comparable, any]` as a generic.
+If the instances for `Map` are `map[string]int` and `map[int][]*cats.Cat`,
+then respectively the names would be `maps.Keys[map[string]int, string, int]`
+and `maps.Keys[map[int][]*cats.Cat, int, []*cats.Cat]`. If this function is used
+in `func Foo[T ~string|~int](data map[string]T) { ... maps.Keys(data) ... }`
+then the instance of `maps.Keys` that `Foo` depends on would be named
+`maps.Keys[map[string]~int|~string, string, ~int|~string]`.
+
+For the method name of unexported methods,
+`<package>.<method name>(<param types>)(<result types>)`, the prefix,
+`<package>.<method name>`, has the same form as the object names above.
+The rest contains the signature, `(<param types>)(<result types>)`.
+The signature is defined with only the types since
+`(v, u int)(ok bool, err error)` should match `(x, y int)(bool, error)`.
+To match both, both will have to be `(int, int)(bool, error)`.
+Also the parameter types should include the variadic indicator,
+e.g. `sum(...int) int`, since that affects how the signature is matched.
+If there are no results then the results part is left off. Otherwise,
+the result types only need parentheses if there is more than one result,
+e.g. `(int, int)`, `(int, int) bool`, and `(int, int)(bool, error)`.
+
+In either the object name or method name, if there is a recursive
+type parameter, e.g. `func Foo[T Bar[T]]()`, the second usage of the
+type parameter will have its type parameters as `...` to prevent an
+infinite loop whilst also indicating which object in the type parameter
+is recursive, e.g. `Foo[Bar[Bar[...]]]`.
+
+### Dependencies
+
+The dependencies are specified in an expression.
+For example, a function that invokes another function will be dependent on
+that invoked function. When a dependency is added, it will be added as one
+or more names to the declaration that depends on it. It follows the
+[naming rules](#naming) so that the dependencies will match correctly.
+
+In theory, structural dependencies would need to be added
+automatically while the declaration is being named. When an interface is named,
+it would automatically add all unexported signatures as dependencies via
+`<package>.<method name>(<param types>)(<result types>)`.
+However, we do not need to do that in GopherJS because we aren't using
+the existence of realized methods in duck-typing. GopherJS stores the full set
+of method information when describing the type so that, even when things like
+unexported methods in interfaces are removed, duck-typing will still work
+correctly. This reduces the size of the code by not keeping a potentially
+long method body when the signature is all that is needed.
+
+Currently, we don't filter unused packages, so there is no need to automatically
+add dependencies on the packages themselves. This is also why the package
+declarations aren't named and therefore are always alive.
+
+## Examples
+
+### Dead Package
+
+In this example, a point package defines a `Point` object.
+The point package may be used by several repos as shared code, so it cannot
+have code manually removed from it to reduce its dependencies for specific
+applications.
+
+For the current example, the `Distance` method is never used and therefore
+dead. The `Distance` method is the only method dependent on the math package.
+It might be safe to make the whole math package dead too and eliminate it in
+this case; however, it is possible that some packages are purposely imported
+without being used and their reason for being included is to invoke the
+initialization functions within the package. If a package has any inits or any
+variable definitions with side effects, then the package cannot be safely removed.
+
+```go
+package point
+
+import "math"
+
+type Point struct {
+	X float64
+	Y float64
+}
+
+func (p Point) Sub(other Point) Point {
+	p.X -= other.X
+	p.Y -= other.Y
+	return p
+}
+
+func (p Point) ToQuadrant1() Point {
+	if p.X < 0.0 {
+		p.X = -p.X
+	}
+	if p.Y < 0.0 {
+		p.Y = -p.Y
+	}
+	return p
+}
+
+func (p Point) Manhattan(other Point) float64 {
+	a := p.Sub(other).ToQuadrant1()
+	return a.X + a.Y
+}
+
+func (p Point) Distance(other Point) float64 {
+	d := p.Sub(other)
+	return math.Sqrt(d.X*d.X + d.Y*d.Y)
+}
+```
+
+```go
+package main
+
+import "point"
+
+func main() {
+	a := point.Point{X: 10.2, Y: 45.3}
+	b := point.Point{X: -23.0, Y: 7.7}
+	println(`Manhattan a to b:`, a.Manhattan(b))
+}
+```
+
+### Grandmas and Zombies
+
+In this example, the following code sorts grandmas and zombies by whether they are
+`Dangerous`. The method `EatBrains` is never used.
+If we remove `EatBrains`
+from `Zombie` then both the grandmas and zombies are moved to the safe
+bunker. If we remove `EatBrains` from `Dangerous` then both grandmas and
+zombies will be moved to the air lock because `Dangerous` will duck-type
+to all `Person` instances. Unused exported methods and signatures must be
+considered alive if the type is alive.
+
+```go
+package main
+
+import "fmt"
+
+type Person interface {
+	MoveTo(loc string)
+}
+
+type Dangerous interface {
+	Person
+	EatBrains()
+}
+
+type Grandma struct{}
+
+func (g Grandma) MoveTo(loc string) {
+	fmt.Println(`grandma was moved to`, loc)
+}
+
+type Zombie struct{}
+
+func (z Zombie) MoveTo(loc string) {
+	fmt.Println(`zombie was moved to`, loc)
+}
+
+func (z Zombie) EatBrains() {}
+
+func main() {
+	people := []Person{Grandma{}, Zombie{}, Grandma{}, Zombie{}}
+	for _, person := range people {
+		if _, ok := person.(Dangerous); ok {
+			person.MoveTo(`air lock`)
+		} else {
+			person.MoveTo(`safe bunker`)
+		}
+	}
+}
+```
+
+### Side Effects
+
+In this example, unused variables are being initialized with expressions
+that have side effects. The `max` value is 8 by the time `main` is called
+because each initialization calls `count()`, which increments `max`.
+The expression doesn't have to have a function call and can be any combination
+of operations.
+
+An initialization may have a side effect even if it doesn't set a value. For
+example, simply printing a message to the console is a side effect that
+cannot be removed even if it is part of an unused initializer.
+
+```go
+package main
+
+import "fmt"
+
+func count() int {
+	max++
+	return max
+}
+
+var (
+	max  = 0
+	_    = count() // a
+	b, c = count(), count()
+	x    = []int{count(), count(), count()}[0]
+	y, z = func() (int, int) { return count(), count() }()
+)
+
+func main() {
+	fmt.Println(`max count`, max) // Outputs: max count 8
+}
+```
+
+### Instance Duck-typing
+
+In this example, the type `StringKeys[T any]` is a map that stores
+any kind of value with string keys. There is an interface `IntProvider`
+that `StringKeys` will duck-type to iff the type argument is `int`,
+i.e. `StringKeys[int]`. This exemplifies how the type arguments, as used
+in the method signatures, affect the overall signature such that in some
+cases a generic object may match an interface and in others it may not.
+
+Also notice that the structure was typed with `T` as the type parameter's
+name whereas the methods use `S`. This shows that the name of the type
+parameter doesn't matter in the instancing. Therefore, outputting a method's name
+(assuming it is unexported) should use the type argument type,
+not the type parameter name, e.g. `value() []int` or `value() []any`
+instead of `value() []S` or `value() []T`.
+ +```go +package main + +import ( + "fmt" + "sort" +) + +type StringKeys[T any] map[string]T + +func (sk StringKeys[S]) Keys() []string { + keys := make([]string, 0, len(sk)) + for key := range sk { + keys = append(keys, key) + } + sort.Strings(keys) + return keys +} + +func (sk StringKeys[S]) Values() []S { + values := make([]S, len(sk)) + for i, key := range sk.Keys() { + values[i] = sk[key] + } + return values +} + +type IntProvider interface { + Values() []int +} + +func Sum(data IntProvider) int { + sum := 0 + for _, value := range data.Values() { + sum += value + } + return sum +} + +func main() { + sInt := StringKeys[int]{ + `one`: 1, + `two`: 2, + `three`: 3, + `four`: 4, + } + fmt.Println(sInt.Keys()) // Outputs: [four one three two] + fmt.Println(sInt.Values()) // Outputs: [4 1 3 2] + fmt.Println(Sum(sInt)) // Outputs: 10 + + sFp := StringKeys[float64]{ + `one`: 1.1, + `two`: 2.2, + `three`: 3.3, + `four`: 4.4, + } + fmt.Println(sFp.Keys()) // Outputs: [four one three two] + fmt.Println(sFp.Values()) // [4.4 1.1 3.3 2.2] + //fmt.Println(Sum(sFp)) // Fails with "StringKeys[float64] does not implement IntProvider" +} +``` + +## Additional Notes + +This DCE is different from those found in +Muchnick, Steven S.. “Advanced Compiler Design and Implementation.” (1997), +Chapter 18 Control-Flow and Low-Level Optimization, +Section 10 Dead-Code Elimination. And different from related DCE designs +such as Knoop, Rüthing, and Steffen. "Partial dead code elimination." (1994), +SIGPLAN Not. 29, 6, 147–158. +See [DCE wiki](https://en.wikipedia.org/wiki/Dead-code_elimination) +for more information. + +Those discuss DCE at the block code level where the higher level +constructs such as functions and objects have been reduced to a graphs of +blocks with variables, procedures, and routines. Since we want to keep the +higher level constructs during transpilation, we simply are reducing +the higher level constructs not being used. + +Any variable internal to the body of a function or method that is unused or +only used for computing new values for itself, are left as is. +The Go compiler and linters have requirements that attempt to prevent this +kind of dead-code in a function body (unless an underscore is used to quite +usage warnings, e.g. `_ = unusedVar`) and prevent unreachable code. +Therefore, we aren't going to worry about trying to DCE inside of function +bodies or in variable initializers. + +GopherJS does not implicitly perform JS Tree Shaking Algorithms, as discussed in +[How Modern Javascript eliminate dead code](https://blog.stackademic.com/how-modern-javascript-eliminates-dead-code-tree-shaking-algorithm-d7861e48df40) +(2023) at this time and provides no guarantees about the effectiveness +of running such an algorithm on the resulting JS. diff --git a/compiler/internal/dce/collector.go b/compiler/internal/dce/collector.go new file mode 100644 index 000000000..fea52468d --- /dev/null +++ b/compiler/internal/dce/collector.go @@ -0,0 +1,46 @@ +package dce + +import ( + "errors" + "go/types" +) + +// Decl is any code declaration that has dead-code elimination (DCE) +// information attached to it. +type Decl interface { + Dce() *Info +} + +// Collector is a tool to collect dependencies for a declaration +// that'll be used in dead-code elimination (DCE). +type Collector struct { + dce *Info +} + +// CollectDCEDeps captures a list of Go objects (types, functions, etc.) +// the code translated inside f() depends on. 
Then sets those objects +// as dependencies of the given dead-code elimination info. +// +// Only one CollectDCEDeps call can be active at a time. +func (c *Collector) CollectDCEDeps(decl Decl, f func()) { + if c.dce != nil { + panic(errors.New(`called CollectDCEDeps inside another CollectDCEDeps call`)) + } + + c.dce = decl.Dce() + defer func() { c.dce = nil }() + + f() +} + +// DeclareDCEDep records that the code that is currently being transpiled +// depends on a given Go object with optional type arguments. +// +// The given optional type arguments are used to when the object is a +// function with type parameters or anytime the object doesn't carry them. +// If not given, this attempts to get the type arguments from the object. +func (c *Collector) DeclareDCEDep(o types.Object, tArgs ...types.Type) { + if c.dce != nil { + c.dce.addDep(o, tArgs) + } +} diff --git a/compiler/internal/dce/dce_test.go b/compiler/internal/dce/dce_test.go new file mode 100644 index 000000000..3ddeac848 --- /dev/null +++ b/compiler/internal/dce/dce_test.go @@ -0,0 +1,1225 @@ +package dce + +import ( + "fmt" + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + "regexp" + "sort" + "testing" + + "github.com/gopherjs/gopherjs/compiler/typesutil" +) + +func Test_Collector_CalledOnce(t *testing.T) { + var c Collector + decl1 := &testDecl{} + decl2 := &testDecl{} + + err := capturePanic(t, func() { + c.CollectDCEDeps(decl1, func() { + c.CollectDCEDeps(decl2, func() { + t.Fatal(`the nested collect function was called`) + }) + }) + }) + errorMatches(t, err, `^called CollectDCEDeps inside another`) +} + +func Test_Collector_Collecting(t *testing.T) { + pkg := testPackage(`tristan`) + obj1 := quickVar(pkg, `Primus`) + obj2 := quickVar(pkg, `Secundus`) + obj3 := quickVar(pkg, `Tertius`) + obj4 := quickVar(pkg, `Quartus`) + obj5 := quickVar(pkg, `Quintus`) + obj6 := quickVar(pkg, `Sextus`) + obj7 := quickVar(pkg, `Una`) + + decl1 := quickTestDecl(obj1) + decl2 := quickTestDecl(obj2) + var c Collector + + c.DeclareDCEDep(obj1) // no effect since a collection isn't running. + depCount(t, decl1, 0) + depCount(t, decl2, 0) + + c.CollectDCEDeps(decl1, func() { + c.DeclareDCEDep(obj2) + c.DeclareDCEDep(obj3) + c.DeclareDCEDep(obj3) // already added so has no effect. + }) + depCount(t, decl1, 2) + depCount(t, decl2, 0) + + c.DeclareDCEDep(obj4) // no effect since a collection isn't running. + depCount(t, decl1, 2) + depCount(t, decl2, 0) + + c.CollectDCEDeps(decl2, func() { + c.DeclareDCEDep(obj5) + c.DeclareDCEDep(obj6) + c.DeclareDCEDep(obj7) + }) + depCount(t, decl1, 2) + depCount(t, decl2, 3) + + // The second collection adds to existing dependencies. 
+ c.CollectDCEDeps(decl2, func() { + c.DeclareDCEDep(obj4) + c.DeclareDCEDep(obj5) + }) + depCount(t, decl1, 2) + depCount(t, decl2, 4) +} + +func Test_Info_SetNameAndDep(t *testing.T) { + tests := []struct { + name string + obj types.Object + want Info // expected Info after SetName + }{ + { + name: `package`, + obj: parseObject(t, `Sarah`, + `package jim + import Sarah "fmt"`), + want: Info{ + objectFilter: `jim.Sarah`, + }, + }, + { + name: `exported var`, + obj: parseObject(t, `Toby`, + `package jim + var Toby float64`), + want: Info{ + objectFilter: `jim.Toby`, + }, + }, + { + name: `exported const`, + obj: parseObject(t, `Ludo`, + `package jim + const Ludo int = 42`), + want: Info{ + objectFilter: `jim.Ludo`, + }, + }, + { + name: `label`, + obj: parseObject(t, `Gobo`, + `package jim + func main() { + i := 0 + Gobo: + i++ + if i < 10 { + goto Gobo + } + }`), + want: Info{ + objectFilter: `jim.Gobo`, + }, + }, + { + name: `exported specific type`, + obj: parseObject(t, `Jen`, + `package jim + type Jen struct{}`), + want: Info{ + objectFilter: `jim.Jen`, + }, + }, + { + name: `exported generic type`, + obj: parseObject(t, `Henson`, + `package jim + type Henson[T comparable] struct{}`), + want: Info{ + objectFilter: `jim.Henson[comparable]`, + }, + }, + { + name: `exported specific function`, + obj: parseObject(t, `Jareth`, + `package jim + func Jareth() {}`), + want: Info{ + objectFilter: `jim.Jareth`, + }, + }, + { + name: `exported generic function`, + obj: parseObject(t, `Didymus`, + `package jim + func Didymus[T comparable]() {}`), + want: Info{ + objectFilter: `jim.Didymus[comparable]`, + }, + }, + { + name: `exported specific method`, + obj: parseObject(t, `Kira`, + `package jim + type Fizzgig string + func (f Fizzgig) Kira() {}`), + want: Info{ + objectFilter: `jim.Fizzgig`, + }, + }, + { + name: `unexported specific method without parameters or results`, + obj: parseObject(t, `frank`, + `package jim + type Aughra int + func (a Aughra) frank() {}`), + want: Info{ + objectFilter: `jim.Aughra`, + methodFilter: `jim.frank()`, + }, + }, + { + name: `unexported specific method with parameters and results`, + obj: parseObject(t, `frank`, + `package jim + type Aughra int + func (a Aughra) frank(other Aughra) (bool, error) { + return a == other, nil + }`), + want: Info{ + objectFilter: `jim.Aughra`, + methodFilter: `jim.frank(jim.Aughra)(bool, error)`, + }, + }, + { + name: `unexported specific method with variadic parameter`, + obj: parseObject(t, `frank`, + `package jim + type Aughra int + func (a Aughra) frank(others ...Aughra) int { + return len(others) + 1 + }`), + want: Info{ + objectFilter: `jim.Aughra`, + methodFilter: `jim.frank(...jim.Aughra) int`, + }, + }, + { + name: `unexported generic method with type parameters and instance argument`, + obj: parseObject(t, `frank`, + `package jim + type Aughra[T ~float64] struct { + value T + } + func (a *Aughra[T]) frank(other *Aughra[float64]) bool { + return float64(a.value) == other.value + }`), + want: Info{ + objectFilter: `jim.Aughra[~float64]`, + methodFilter: `jim.frank(*jim.Aughra[float64]) bool`, + }, + }, + { + name: `unexported generic method with type parameters and generic argument`, + obj: parseObject(t, `frank`, + `package jim + type Aughra[T ~float64] struct { + value T + } + func (a *Aughra[T]) frank(other *Aughra[T]) bool { + return a.value == other.value + }`), + want: Info{ + objectFilter: `jim.Aughra[~float64]`, + methodFilter: `jim.frank(*jim.Aughra[~float64]) bool`, + }, + }, + { + name: `specific method on 
unexported type`, + obj: parseObject(t, `Red`, + `package jim + type wembley struct{} + func (w wembley) Red() {}`), + want: Info{ + objectFilter: `jim.wembley`, + }, + }, + { + name: `unexported method resulting in an interface with exported methods`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() interface{ + WakkaWakka(joke string)(landed bool) + Firth()(string, error) + }`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() interface{ Firth()(string, error); WakkaWakka(string) bool }`, + }, + }, + { + name: `unexported method resulting in an interface with unexported methods`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() interface{ + wakkaWakka(joke string)(landed bool) + firth()(string, error) + }`), + want: Info{ + objectFilter: `jim.Fozzie`, + // The package path, i.e. `jim.`, is used on unexported methods + // to ensure the filter will not match another package's method. + methodFilter: `jim.bear() interface{ jim.firth()(string, error); jim.wakkaWakka(string) bool }`, + }, + }, + { + name: `unexported method resulting in an empty interface `, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() interface{}`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() any`, + }, + }, + { + name: `unexported method resulting in a function`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() func(joke string)(landed bool)`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() func(string) bool`, + }, + }, + { + name: `unexported method resulting in a struct`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() struct{ + Joke string + WakkaWakka bool + }`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() struct{ Joke string; WakkaWakka bool }`, + }, + }, + { + name: `unexported method resulting in a struct with type parameter`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie[T ~string|~int] struct{} + func (f *Fozzie[T]) bear() struct{ + Joke T + wakkaWakka bool + }`), + want: Info{ + objectFilter: `jim.Fozzie[~int|~string]`, + // The `Joke ~int|~string` part will likely not match other methods + // such as methods with `Joke string` or `Joke int`, however the + // interface should be defined for the instantiations of this type + // and those should have the correct field type for `Joke`. 
+ methodFilter: `jim.bear() struct{ Joke ~int|~string; jim.wakkaWakka bool }`, + }, + }, + { + name: `unexported method resulting in an empty struct`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() struct{}`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() struct{}`, + }, + }, + { + name: `unexported method resulting in a slice`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear()(jokes []string)`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() []string`, + }, + }, + { + name: `unexported method resulting in an array`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear()(jokes [2]string)`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() [2]string`, + }, + }, + { + name: `unexported method resulting in a map`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear()(jokes map[string]bool)`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() map[string]bool`, + }, + }, + { + name: `unexported method resulting in a channel`, + obj: parseObject(t, `bear`, + `package jim + type Fozzie struct{} + func (f *Fozzie) bear() chan string`), + want: Info{ + objectFilter: `jim.Fozzie`, + methodFilter: `jim.bear() chan string`, + }, + }, + { + name: `unexported method resulting in a complex compound named type`, + obj: parseObject(t, `packRat`, + `package jim + type Gonzo[T any] struct{ + v T + } + func (g Gonzo[T]) Get() T { return g.v } + type Rizzo struct{} + func (r Rizzo) packRat(v int) Gonzo[Gonzo[Gonzo[int]]] { + return Gonzo[Gonzo[Gonzo[int]]]{v: Gonzo[Gonzo[int]]{v: Gonzo[int]{v: v}}} + } + var _ int = Rizzo{}.packRat(42).Get().Get().Get()`), + want: Info{ + objectFilter: `jim.Rizzo`, + methodFilter: `jim.packRat(int) jim.Gonzo[jim.Gonzo[jim.Gonzo[int]]]`, + }, + }, + { + name: `unexported method resulting in an instance with same type parameter`, + obj: parseObject(t, `sidekick`, + `package jim + type Beaker[T any] struct{} + type Honeydew[S any] struct{} + func (hd Honeydew[S]) sidekick() Beaker[S] { + return Beaker[S]{} + }`), + want: Info{ + objectFilter: `jim.Honeydew[any]`, + methodFilter: `jim.sidekick() jim.Beaker[any]`, + }, + }, + { + name: `struct with self referencing type parameter constraints`, + obj: parseObject(t, `Keys`, + `package jim + func Keys[K comparable, V any, M ~map[K]V](m M) []K { + keys := make([]K, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + return keys + }`), + want: Info{ + objectFilter: `jim.Keys[comparable, any, ~map[comparable]any]`, + }, + }, + { + name: `interface with self referencing type parameter constraints`, + obj: parseObject(t, `ElectricMayhem`, + `package jim + type ElectricMayhem[K comparable, V any, M ~map[K]V] interface { + keys() []K + values() []V + asMap() M + }`), + want: Info{ + objectFilter: `jim.ElectricMayhem[comparable, any, ~map[comparable]any]`, + }, + }, + { + name: `function with recursive referencing type parameter constraints`, + obj: parseObject(t, `doWork`, + `package jim + type Doozer[T any] interface { + comparable + Work() T + } + + func doWork[T Doozer[T]](a T) T { + return a.Work() + }`), + want: Info{ + objectFilter: `jim.doWork[jim.Doozer[jim.Doozer[...]]]`, + }, + }, + { + name: `function with recursive referencing multiple type parameter constraints`, + obj: parseObject(t, `doWork`, + `package jim + type Doozer[T, U any] interface { + Work() T 
+ Play() U + } + + func doWork[T Doozer[T, U], U any](a T) T { + return a.Work() + }`), + want: Info{ + objectFilter: `jim.doWork[jim.Doozer[jim.Doozer[...], any], any]`, + }, + }, + { + name: `function with multiple recursive referencing multiple type parameter constraints`, + obj: parseObject(t, `doWork`, + `package jim + type Doozer[T, U any] interface { + Work() T + Play() U + } + + func doWork[T Doozer[T, U], U Doozer[T, U]](a T) T { + return a.Work() + }`), + want: Info{ + objectFilter: `jim.doWork[jim.Doozer[jim.Doozer[...], jim.Doozer[...]], jim.Doozer[jim.Doozer[...], jim.Doozer[...]]]`, + }, + }, + { + name: `function with multiple recursive referencing type parameter constraints`, + obj: parseObject(t, `doWork`, + `package jim + type Doozer[T any] interface { + Work() T + } + + type Fraggle[U any] interface { + Play() U + } + + func doWork[T Doozer[T], U Fraggle[U]](a T) T { + return a.Work() + }`), + want: Info{ + objectFilter: `jim.doWork[jim.Doozer[jim.Doozer[...]], jim.Fraggle[jim.Fraggle[...]]]`, + }, + }, + { + name: `function with osculating recursive referencing type parameter constraints`, + obj: parseObject(t, `doWork`, + `package jim + type Doozer[T any] interface { + Work() T + } + + type Fraggle[U any] interface { + Play() U + } + + func doWork[T Doozer[U], U Fraggle[T]]() {}`), + want: Info{ + objectFilter: `jim.doWork[jim.Doozer[jim.Fraggle[jim.Doozer[...]]], jim.Fraggle[jim.Doozer[jim.Fraggle[...]]]]`, + }, + }, + } + + t.Run(`SetName`, func(t *testing.T) { + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + d := &testDecl{} + equal(t, d.Dce().unnamed(), true) + equal(t, d.Dce().String(), `[unnamed] -> []`) + t.Log(`object:`, types.ObjectString(tt.obj, nil)) + + d.Dce().SetName(tt.obj) + equal(t, d.Dce().unnamed(), tt.want.unnamed()) + equal(t, d.Dce().objectFilter, tt.want.objectFilter) + equal(t, d.Dce().methodFilter, tt.want.methodFilter) + equalSlices(t, d.Dce().getDeps(), tt.want.getDeps()) + equal(t, d.Dce().String(), tt.want.String()) + }) + } + }) + + t.Run(`addDep`, func(t *testing.T) { + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + d := &testDecl{} + t.Log(`object:`, types.ObjectString(tt.obj, nil)) + + wantDeps := []string{} + if len(tt.want.objectFilter) > 0 { + wantDeps = append(wantDeps, tt.want.objectFilter) + } + if len(tt.want.methodFilter) > 0 { + wantDeps = append(wantDeps, tt.want.methodFilter) + } + sort.Strings(wantDeps) + + c := Collector{} + c.CollectDCEDeps(d, func() { + c.DeclareDCEDep(tt.obj) + }) + equalSlices(t, d.Dce().getDeps(), wantDeps) + }) + } + }) +} + +func Test_Info_SetNameOnlyOnce(t *testing.T) { + pkg := testPackage(`mogwai`) + obj1 := quickVar(pkg, `Gizmo`) + obj2 := quickVar(pkg, `Stripe`) + + decl := &testDecl{} + decl.Dce().SetName(obj1) + + err := capturePanic(t, func() { + decl.Dce().SetName(obj2) + }) + errorMatches(t, err, `^may only set the name once for path/to/mogwai\.Gizmo .*$`) +} + +func Test_Info_UsesDeps(t *testing.T) { + tests := []struct { + name string + id string // identifier to check for usage and instance + line int // line number to find the identifier on + src string + wantDeps []string + }{ + { + name: `usage of specific struct`, + id: `Sinclair`, + line: 5, + src: `package epsilon3 + type Sinclair struct{} + func (s Sinclair) command() { } + func main() { + Sinclair{}.command() //<-- line 5 + }`, + wantDeps: []string{`epsilon3.Sinclair`}, + }, + { + name: `usage of generic struct`, + id: `Sheridan`, + line: 5, + src: `package epsilon3 + type Sheridan[T 
comparable] struct{} + func (s Sheridan[T]) command() { } + func main() { + Sheridan[string]{}.command() //<-- line 5 + }`, + wantDeps: []string{`epsilon3.Sheridan[string]`}, + }, + { + name: `usage of unexported method of generic struct`, + id: `command`, + line: 5, + src: `package epsilon3 + type Sheridan[T comparable] struct{} + func (s Sheridan[T]) command() { } + func main() { + Sheridan[string]{}.command() //<-- line 5 + }`, + // unexported methods need the method filter for matching with + // unexported methods on interfaces. + wantDeps: []string{ + `epsilon3.Sheridan[string]`, + `epsilon3.command()`, + }, + }, + { + name: `usage of unexported method of generic struct pointer`, + id: `command`, + line: 5, + src: `package epsilon3 + type Sheridan[T comparable] struct{} + func (s *Sheridan[T]) command() { } + func main() { + (&Sheridan[string]{}).command() //<-- line 5 + }`, + // unexported methods need the method filter for matching with + // unexported methods on interfaces. + wantDeps: []string{ + `epsilon3.Sheridan[string]`, + `epsilon3.command()`, + }, + }, + { + name: `invocation of function with implicit type arguments`, + id: `Move`, + line: 5, + src: `package epsilon3 + type Ivanova[T any] struct{} + func Move[T ~string|~int](i Ivanova[T]) { } + func main() { + Move(Ivanova[string]{}) //<-- line 5 + }`, + wantDeps: []string{`epsilon3.Move[string]`}, + }, + { + name: `exported method on a complex generic type`, + id: `Get`, + line: 6, + src: `package epsilon3 + type Garibaldi[T any] struct{ v T } + func (g Garibaldi[T]) Get() T { return g.v } + func main() { + michael := Garibaldi[Garibaldi[Garibaldi[int]]]{v: Garibaldi[Garibaldi[int]]{v: Garibaldi[int]{v: 42}}} + _ = michael.Get() // <-- line 6 + }`, + wantDeps: []string{`epsilon3.Garibaldi[epsilon3.Garibaldi[epsilon3.Garibaldi[int]]]`}, + }, + { + name: `unexported method on a complex generic type`, + id: `get`, + line: 6, + src: `package epsilon3 + type Garibaldi[T any] struct{ v T } + func (g Garibaldi[T]) get() T { return g.v } + func main() { + michael := Garibaldi[Garibaldi[Garibaldi[int]]]{v: Garibaldi[Garibaldi[int]]{v: Garibaldi[int]{v: 42}}} + _ = michael.get() // <-- line 6 + }`, + wantDeps: []string{ + `epsilon3.Garibaldi[epsilon3.Garibaldi[epsilon3.Garibaldi[int]]]`, + `epsilon3.get() epsilon3.Garibaldi[epsilon3.Garibaldi[int]]`, + }, + }, + { + name: `invoke of method with an unnamed interface receiver`, + id: `heal`, + line: 8, + src: `package epsilon3 + type Franklin struct{} + func (g Franklin) heal() {} + func main() { + var stephen interface{ + heal() + } = Franklin{} + stephen.heal() // <-- line 8 + }`, + wantDeps: []string{ + `epsilon3.heal()`, + }, + }, + { + name: `invoke a method with a generic return type via instance`, + // Based on go/1.19.13/x64/test/dictionaryCapture-noinline.go + id: `lennier`, + line: 6, + src: `package epsilon3 + type delenn[T any] struct { a T } + func (d delenn[T]) lennier() T { return d.a } + func cocoon() int { + x := delenn[int]{a: 7} + f := delenn[int].lennier // <-- line 6 + return f(x) + }`, + wantDeps: []string{ + `epsilon3.delenn[int]`, + `epsilon3.lennier() int`, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + d := &testDecl{} + uses, inst := parseInstanceUse(t, tt.line, tt.id, tt.src) + tArgs := typeListToSlice(inst.TypeArgs) + t.Logf(`object: %s with [%s]`, types.ObjectString(uses, nil), (typesutil.TypeList)(tArgs).String()) + + c := Collector{} + c.CollectDCEDeps(d, func() { + c.DeclareDCEDep(uses, tArgs...) 
+ }) + equalSlices(t, d.Dce().getDeps(), tt.wantDeps) + }) + } +} + +func Test_Info_SpecificCasesDeps(t *testing.T) { + tests := []struct { + name string + obj types.Object + tArgs []types.Type + wantDeps []string + }{ + { + name: `struct instantiation with generic object`, + obj: parseObject(t, `Mikey`, + `package astoria; + type Mikey[T comparable] struct{} + `), + tArgs: []types.Type{types.Typ[types.String]}, + wantDeps: []string{`astoria.Mikey[string]`}, + }, + { + name: `method instantiation with generic object`, + obj: parseObject(t, `brand`, + `package astoria; + type Mikey[T comparable] struct{ a T} + func (m Mikey[T]) brand() T { + return m.a + }`), + tArgs: []types.Type{types.Typ[types.String]}, + wantDeps: []string{ + `astoria.Mikey[string]`, + `astoria.brand() string`, + }, + }, + { + name: `method instantiation with generic object and multiple type parameters`, + obj: parseObject(t, `shuffle`, + `package astoria; + type Chunk[K comparable, V any] struct{ data map[K]V } + func (c Chunk[K, V]) shuffle(k K) V { + return c.data[k] + }`), + tArgs: []types.Type{types.Typ[types.String], types.Typ[types.Int]}, + wantDeps: []string{ + `astoria.Chunk[string, int]`, + `astoria.shuffle(string) int`, + }, + }, + { + name: `method instantiation with generic object renamed type parameters`, + obj: parseObject(t, `shuffle`, + `package astoria; + type Chunk[K comparable, V any] struct{ data map[K]V } + func (c Chunk[T, K]) shuffle(k T) K { + return c.data[k] + }`), + tArgs: []types.Type{types.Typ[types.String], types.Typ[types.Int]}, + wantDeps: []string{ + `astoria.Chunk[string, int]`, + `astoria.shuffle(string) int`, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + d := &testDecl{} + t.Logf(`object: %s with [%s]`, types.ObjectString(tt.obj, nil), (typesutil.TypeList)(tt.tArgs).String()) + + c := Collector{} + c.CollectDCEDeps(d, func() { + c.DeclareDCEDep(tt.obj, tt.tArgs...) 
+ }) + equalSlices(t, d.Dce().getDeps(), tt.wantDeps) + }) + } +} + +func Test_Info_SetAsAlive(t *testing.T) { + pkg := testPackage(`fantasia`) + + t.Run(`set alive prior to naming`, func(t *testing.T) { + obj := quickVar(pkg, `Falkor`) + decl := &testDecl{} + equal(t, decl.Dce().isAlive(), true) // unnamed is automatically alive + equal(t, decl.Dce().String(), `[unnamed] -> []`) + + decl.Dce().SetAsAlive() + equal(t, decl.Dce().isAlive(), true) // still alive but now explicitly alive + equal(t, decl.Dce().String(), `[alive] [unnamed] -> []`) + + decl.Dce().SetName(obj) + equal(t, decl.Dce().isAlive(), true) // alive because SetAsAlive was called + equal(t, decl.Dce().String(), `[alive] path/to/fantasia.Falkor -> []`) + }) + + t.Run(`set alive after naming`, func(t *testing.T) { + obj := quickVar(pkg, `Artax`) + decl := &testDecl{} + equal(t, decl.Dce().isAlive(), true) // unnamed is automatically alive + equal(t, decl.Dce().String(), `[unnamed] -> []`) + + decl.Dce().SetName(obj) + equal(t, decl.Dce().isAlive(), false) // named so no longer automatically alive + equal(t, decl.Dce().String(), `path/to/fantasia.Artax -> []`) + + decl.Dce().SetAsAlive() + equal(t, decl.Dce().isAlive(), true) // alive because SetAsAlive was called + equal(t, decl.Dce().String(), `[alive] path/to/fantasia.Artax -> []`) + }) +} + +func Test_Selector_JustVars(t *testing.T) { + pkg := testPackage(`tolkien`) + frodo := quickTestDecl(quickVar(pkg, `Frodo`)) + samwise := quickTestDecl(quickVar(pkg, `Samwise`)) + meri := quickTestDecl(quickVar(pkg, `Meri`)) + pippin := quickTestDecl(quickVar(pkg, `Pippin`)) + aragorn := quickTestDecl(quickVar(pkg, `Aragorn`)) + boromir := quickTestDecl(quickVar(pkg, `Boromir`)) + gimli := quickTestDecl(quickVar(pkg, `Gimli`)) + legolas := quickTestDecl(quickVar(pkg, `Legolas`)) + gandalf := quickTestDecl(quickVar(pkg, `Gandalf`)) + fellowship := []*testDecl{ + frodo, samwise, meri, pippin, aragorn, + boromir, gimli, legolas, gandalf, + } + + c := Collector{} + c.CollectDCEDeps(frodo, func() { + c.DeclareDCEDep(samwise.obj) + c.DeclareDCEDep(meri.obj) + c.DeclareDCEDep(pippin.obj) + }) + c.CollectDCEDeps(pippin, func() { + c.DeclareDCEDep(meri.obj) + }) + c.CollectDCEDeps(aragorn, func() { + c.DeclareDCEDep(boromir.obj) + }) + c.CollectDCEDeps(gimli, func() { + c.DeclareDCEDep(legolas.obj) + }) + c.CollectDCEDeps(legolas, func() { + c.DeclareDCEDep(gimli.obj) + }) + c.CollectDCEDeps(gandalf, func() { + c.DeclareDCEDep(frodo.obj) + c.DeclareDCEDep(aragorn.obj) + c.DeclareDCEDep(gimli.obj) + c.DeclareDCEDep(legolas.obj) + }) + + for _, decl := range fellowship { + equal(t, decl.Dce().isAlive(), false) + } + + tests := []struct { + name string + init []*testDecl // which decls to set explicitly alive + want []*testDecl // which decls should be determined as alive + }{ + { + name: `all alive`, + init: fellowship, + want: fellowship, + }, + { + name: `all dead`, + init: []*testDecl{}, + want: []*testDecl{}, + }, + { + name: `Frodo`, + init: []*testDecl{frodo}, + want: []*testDecl{frodo, samwise, meri, pippin}, + }, + { + name: `Sam and Pippin`, + init: []*testDecl{samwise, pippin}, + want: []*testDecl{samwise, meri, pippin}, + }, + { + name: `Gandalf`, + init: []*testDecl{gandalf}, + want: fellowship, + }, + { + name: `Legolas`, + init: []*testDecl{legolas}, + want: []*testDecl{legolas, gimli}, + }, + { + name: `Gimli`, + init: []*testDecl{gimli}, + want: []*testDecl{legolas, gimli}, + }, + { + name: `Boromir`, + init: []*testDecl{boromir}, + want: []*testDecl{boromir}, + }, + { + name: 
`Aragorn`, + init: []*testDecl{aragorn}, + want: []*testDecl{aragorn, boromir}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + for _, decl := range fellowship { + decl.Dce().alive = false + } + for _, decl := range tt.init { + decl.Dce().SetAsAlive() + } + + s := &Selector[*testDecl]{} + for _, decl := range fellowship { + s.Include(decl, false) + } + + selected := s.AliveDecls() + for _, decl := range tt.want { + if _, ok := selected[decl]; !ok { + t.Errorf(`expected %q to be alive`, decl.obj.String()) + } + delete(selected, decl) + } + for decl := range selected { + t.Errorf(`expected %q to be dead`, decl.obj.String()) + } + }) + } +} + +func Test_Selector_SpecificMethods(t *testing.T) { + objects := parseObjects(t, + `package pratchett + + type rincewind struct{} + func (r rincewind) Run() {} + func (r rincewind) hide() {} + + type Vimes struct{} + func (v Vimes) Run() {} + func (v Vimes) Read() {} + + func Vetinari() {}`) + + var ( + // Objects are in read order so pick the objects we want for this test + // while skipping over `r rincewind` and `v Vimes`. + rincewind = quickTestDecl(objects[0]) + rincewindRun = quickTestDecl(objects[2]) + rincewindHide = quickTestDecl(objects[4]) + vimes = quickTestDecl(objects[5]) + vimesRun = quickTestDecl(objects[7]) + vimesRead = quickTestDecl(objects[9]) + vetinari = quickTestDecl(objects[10]) + ) + allDecls := []*testDecl{rincewind, rincewindRun, rincewindHide, vimes, vimesRun, vimesRead, vetinari} + + c := Collector{} + c.CollectDCEDeps(rincewindRun, func() { + c.DeclareDCEDep(rincewind.obj) + }) + c.CollectDCEDeps(rincewindHide, func() { + c.DeclareDCEDep(rincewind.obj) + }) + c.CollectDCEDeps(vimesRun, func() { + c.DeclareDCEDep(vimes.obj) + }) + c.CollectDCEDeps(vimesRead, func() { + c.DeclareDCEDep(vimes.obj) + }) + vetinari.Dce().SetAsAlive() + + tests := []struct { + name string + deps []*testDecl // which decls are vetinari dependent on + want []*testDecl // which decls should be determined as alive + }{ + { + name: `no deps`, + deps: []*testDecl{}, + want: []*testDecl{vetinari}, + }, + { + name: `structs`, + deps: []*testDecl{rincewind, vimes}, + // rincewindHide is not included because it is not exported and not used. 
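+			// rincewindRun, vimesRun, and vimesRead are included because exported
+			// methods only carry the receiver's object filter, so they become alive
+			// whenever their receiver type is alive; unexported methods additionally
+			// need their method filter to be matched by a use.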
+ want: []*testDecl{rincewind, rincewindRun, vimes, vimesRun, vimesRead, vetinari}, + }, + { + name: `exported method`, + deps: []*testDecl{rincewind, rincewindRun}, + want: []*testDecl{rincewind, rincewindRun, vetinari}, + }, + { + name: `unexported method`, + deps: []*testDecl{rincewind, rincewindHide}, + want: []*testDecl{rincewind, rincewindRun, rincewindHide, vetinari}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + vetinari.Dce().deps = nil // reset deps + c.CollectDCEDeps(vetinari, func() { + for _, decl := range tt.deps { + c.DeclareDCEDep(decl.obj) + } + }) + + s := Selector[*testDecl]{} + for _, decl := range allDecls { + s.Include(decl, false) + } + selected := s.AliveDecls() + for _, decl := range tt.want { + if _, ok := selected[decl]; !ok { + t.Errorf(`expected %q to be alive`, decl.obj.String()) + } + delete(selected, decl) + } + for decl := range selected { + t.Errorf(`expected %q to be dead`, decl.obj.String()) + } + }) + } +} + +type testDecl struct { + obj types.Object // should match the object used in Dce.SetName when set + dce Info +} + +func (d *testDecl) Dce() *Info { + return &d.dce +} + +func testPackage(name string) *types.Package { + return types.NewPackage(`path/to/`+name, name) +} + +func quickTestDecl(o types.Object) *testDecl { + d := &testDecl{obj: o} + d.Dce().SetName(o) + return d +} + +func quickVar(pkg *types.Package, name string) *types.Var { + return types.NewVar(token.NoPos, pkg, name, types.Typ[types.Int]) +} + +func newTypeInfo() *types.Info { + return &types.Info{ + Defs: map[*ast.Ident]types.Object{}, + Uses: map[*ast.Ident]types.Object{}, + Instances: map[*ast.Ident]types.Instance{}, + } +} + +func parseObject(t *testing.T, name, source string) types.Object { + t.Helper() + objects := parseObjects(t, source) + for _, obj := range objects { + if obj.Name() == name { + return obj + } + } + t.Fatalf(`object %q not found`, name) + return nil +} + +func parseObjects(t *testing.T, source string) []types.Object { + t.Helper() + fset := token.NewFileSet() + info := newTypeInfo() + parsePackage(t, source, fset, info) + objects := make([]types.Object, 0, len(info.Defs)) + for _, obj := range info.Defs { + if obj != nil { + objects = append(objects, obj) + } + } + sort.Slice(objects, func(i, j int) bool { + return objects[i].Pos() < objects[j].Pos() + }) + return objects +} + +func parseInstanceUse(t *testing.T, lineNo int, idName, source string) (types.Object, types.Instance) { + t.Helper() + fset := token.NewFileSet() + info := newTypeInfo() + parsePackage(t, source, fset, info) + for id, obj := range info.Uses { + if id.Name == idName && fset.Position(id.Pos()).Line == lineNo { + return obj, info.Instances[id] + } + } + t.Fatalf(`failed to find %s on line %d`, idName, lineNo) + return nil, types.Instance{} +} + +func parsePackage(t *testing.T, source string, fset *token.FileSet, info *types.Info) *types.Package { + t.Helper() + f, err := parser.ParseFile(fset, `test.go`, source, 0) + if err != nil { + t.Fatal(`parsing source:`, err) + } + + conf := types.Config{ + Importer: importer.Default(), + DisableUnusedImportCheck: true, + } + pkg, err := conf.Check(f.Name.Name, fset, []*ast.File{f}, info) + if err != nil { + t.Fatal(`type checking:`, err) + } + return pkg +} + +func capturePanic(t *testing.T, f func()) (err error) { + t.Helper() + defer func() { + t.Helper() + if r := recover(); r != nil { + if err2, ok := r.(error); ok { + err = err2 + return + } + t.Errorf(`expected an error to be panicked but got (%[1]T) 
%[1]#v`, r) + return + } + t.Error(`expected a panic but got none`) + }() + + f() + return nil +} + +func errorMatches(t *testing.T, err error, wantPattern string) { + t.Helper() + re := regexp.MustCompile(wantPattern) + if got := fmt.Sprint(err); !re.MatchString(got) { + t.Errorf(`expected error %q to match %q`, got, re.String()) + } +} + +func depCount(t *testing.T, decl *testDecl, want int) { + t.Helper() + if got := len(decl.Dce().deps); got != want { + t.Errorf(`expected %d deps but got %d`, want, got) + } +} + +func equal[T comparable](t *testing.T, got, want T) { + t.Helper() + if got != want { + t.Errorf("Unexpected value was gotten:\n\texp: %#v\n\tgot: %#v", want, got) + } +} + +func equalSlices[T comparable](t *testing.T, got, want []T) { + t.Helper() + if len(got) != len(want) { + t.Errorf("expected %d but got %d\n\texp: %#v\n\tgot: %#v", len(want), len(got), want, got) + return + } + for i, wantElem := range want { + equal(t, got[i], wantElem) + } +} diff --git a/compiler/internal/dce/filters.go b/compiler/internal/dce/filters.go new file mode 100644 index 000000000..420fd4310 --- /dev/null +++ b/compiler/internal/dce/filters.go @@ -0,0 +1,344 @@ +package dce + +import ( + "go/types" + "sort" + "strconv" + "strings" +) + +// getFilters determines the DCE filters for the given object. +// This will return an object filter and optionally return a method filter. +// +// Typically, the object filter will always be set and the method filter +// will be empty unless the object is an unexported method. +// However, when the object is a method invocation on an unnamed interface type +// the object filter will be empty and only the method filter will be set. +// The later shouldn't happen when naming a declaration but only when creating +// dependencies. +func getFilters(o types.Object, tArgs []types.Type) (objectFilter, methodFilter string) { + if f, ok := o.(*types.Func); ok { + sig := f.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + // The object is a method so the object filter is the receiver type + // if the receiver type is named, otherwise it's an unnamed interface. + typ := recv.Type() + if ptrType, ok := typ.(*types.Pointer); ok { + typ = ptrType.Elem() + } + if len(tArgs) == 0 { + tArgs = getTypeArgs(typ) + } + if named, ok := typ.(*types.Named); ok { + objectFilter = getObjectFilter(named.Obj(), tArgs) + } + + // The method is not exported so we only need the method filter. + if !o.Exported() { + methodFilter = getMethodFilter(o, tArgs) + } + return + } + } + + // The object is not a method so we only need the object filter. + objectFilter = getObjectFilter(o, tArgs) + return +} + +// getObjectFilter returns the object filter that functions as the primary +// name when determining if a declaration is alive or not. +// See [naming design] for more information. +// +// [naming design]: https://github.com/gopherjs/gopherjs/compiler/internal/dce/README.md#naming +func getObjectFilter(o types.Object, tArgs []types.Type) string { + return (&filterGen{argTypeRemap: tArgs}).Object(o, tArgs) +} + +// getMethodFilter returns the method filter that functions as the secondary +// name when determining if a declaration is alive or not. +// See [naming design] for more information. 
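+//
+// For example, per the tests above, an unexported method such as
+// `func (a Aughra) frank(other Aughra) (bool, error)` in package `jim`
+// is expected to produce the filter `jim.frank(jim.Aughra)(bool, error)`.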
+// +// [naming design]: https://github.com/gopherjs/gopherjs/compiler/internal/dce/README.md#naming +func getMethodFilter(o types.Object, tArgs []types.Type) string { + if sig, ok := o.Type().(*types.Signature); ok { + if len(tArgs) == 0 { + if recv := sig.Recv(); recv != nil { + tArgs = getTypeArgs(recv.Type()) + } + } + gen := &filterGen{argTypeRemap: tArgs} + return objectName(o) + gen.Signature(sig) + } + return `` +} + +// objectName returns the name part of a filter name, +// including the package path, if available. +// +// This is different from `o.Id` since it always includes the package path +// when available and doesn't add "_." when not available. +func objectName(o types.Object) string { + if o.Pkg() != nil { + return o.Pkg().Path() + `.` + o.Name() + } + return o.Name() +} + +// getTypeArgs gets the type arguments for the given type +// wether they are type arguments or type parameters. +func getTypeArgs(typ types.Type) []types.Type { + switch t := typ.(type) { + case *types.Pointer: + return getTypeArgs(t.Elem()) + case *types.Named: + if typeArgs := t.TypeArgs(); typeArgs != nil { + return typeListToSlice(typeArgs) + } + if typeParams := t.TypeParams(); typeParams != nil { + return typeParamListToSlice(typeParams) + } + case *types.Signature: + if typeParams := t.RecvTypeParams(); typeParams != nil { + return typeParamListToSlice(typeParams) + } + if typeParams := t.TypeParams(); typeParams != nil { + return typeParamListToSlice(typeParams) + } + } + return nil +} + +// typeListToSlice returns the list of type arguments for the type arguments. +func typeListToSlice(typeArgs *types.TypeList) []types.Type { + tArgs := make([]types.Type, typeArgs.Len()) + for i := range tArgs { + tArgs[i] = typeArgs.At(i) + } + return tArgs +} + +// typeParamListToSlice returns the list of type arguments for the type parameters. +func typeParamListToSlice(typeParams *types.TypeParamList) []types.Type { + tParams := make([]types.Type, typeParams.Len()) + for i := range tParams { + tParams[i] = typeParams.At(i).Constraint() + } + return tParams +} + +type processingGroup struct { + o types.Object + tArgs []types.Type +} + +func (p processingGroup) is(o types.Object, tArgs []types.Type) bool { + if len(p.tArgs) != len(tArgs) || p.o != o { + return false + } + for i, tArg := range tArgs { + if p.tArgs[i] != tArg { + return false + } + } + return true +} + +type filterGen struct { + // argTypeRemap is the type arguments in the same order as the + // type parameters in the top level object such that the type parameters + // index can be used to get the type argument. + argTypeRemap []types.Type + inProgress []processingGroup +} + +func (gen *filterGen) startProcessing(o types.Object, tArgs []types.Type) bool { + for _, p := range gen.inProgress { + if p.is(o, tArgs) { + return false + } + } + gen.inProgress = append(gen.inProgress, processingGroup{o, tArgs}) + return true +} + +func (gen *filterGen) stopProcessing() { + gen.inProgress = gen.inProgress[:len(gen.inProgress)-1] +} + +// Object returns an object filter or filter part for an object. +func (gen *filterGen) Object(o types.Object, tArgs []types.Type) string { + filter := objectName(o) + + // Add additional type information for generics and instances. + if len(tArgs) == 0 { + tArgs = getTypeArgs(o.Type()) + } + if len(tArgs) > 0 { + // Avoid infinite recursion in type arguments by + // tracking the current object and type arguments being processed + // and skipping if already in progress. 
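+		// For example, the recursive constraint in `func doWork[T Doozer[T]](a T) T`
+		// from the tests is expected to collapse to
+		// `jim.doWork[jim.Doozer[jim.Doozer[...]]]` instead of recursing indefinitely.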
+ if gen.startProcessing(o, tArgs) { + filter += gen.TypeArgs(tArgs) + gen.stopProcessing() + } else { + filter += `[...]` + } + } + + return filter +} + +// Signature returns the filter part containing the signature +// parameters and results for a function or method, e.g. `(int)(bool,error)`. +func (gen *filterGen) Signature(sig *types.Signature) string { + filter := `(` + gen.Tuple(sig.Params(), sig.Variadic()) + `)` + switch sig.Results().Len() { + case 0: + break + case 1: + filter += ` ` + gen.Type(sig.Results().At(0).Type()) + default: + filter += `(` + gen.Tuple(sig.Results(), false) + `)` + } + return filter +} + +// TypeArgs returns the filter part containing the type +// arguments, e.g. `[any,int|string]`. +func (gen *filterGen) TypeArgs(tArgs []types.Type) string { + parts := make([]string, len(tArgs)) + for i, tArg := range tArgs { + parts[i] = gen.Type(tArg) + } + return `[` + strings.Join(parts, `, `) + `]` +} + +// Tuple returns the filter part containing parameter or result +// types for a function, e.g. `(int,string)`, `(int,...string)`. +func (gen *filterGen) Tuple(t *types.Tuple, variadic bool) string { + count := t.Len() + parts := make([]string, count) + for i := range parts { + argType := t.At(i).Type() + if i == count-1 && variadic { + if slice, ok := argType.(*types.Slice); ok { + argType = slice.Elem() + } + parts[i] = `...` + gen.Type(argType) + } else { + parts[i] = gen.Type(argType) + } + } + return strings.Join(parts, `, `) +} + +// Type returns the filter part for a single type. +func (gen *filterGen) Type(typ types.Type) string { + switch t := typ.(type) { + case types.Object: + return gen.Object(t, nil) + + case *types.Array: + return `[` + strconv.FormatInt(t.Len(), 10) + `]` + gen.Type(t.Elem()) + case *types.Chan: + return `chan ` + gen.Type(t.Elem()) + case *types.Interface: + return gen.Interface(t) + case *types.Map: + return `map[` + gen.Type(t.Key()) + `]` + gen.Type(t.Elem()) + case *types.Named: + // Get type args from named instance not generic object + return gen.Object(t.Obj(), getTypeArgs(t)) + case *types.Pointer: + return `*` + gen.Type(t.Elem()) + case *types.Signature: + return `func` + gen.Signature(t) + case *types.Slice: + return `[]` + gen.Type(t.Elem()) + case *types.Struct: + return gen.Struct(t) + case *types.TypeParam: + return gen.TypeParam(t) + default: + // Anything else, like basics, just stringify normally. + return t.String() + } +} + +// Union returns the filter part for a union of types from an type parameter +// constraint, e.g. `~string|int|~float64`. +func (gen *filterGen) Union(u *types.Union) string { + parts := make([]string, u.Len()) + for i := range parts { + term := u.Term(i) + part := gen.Type(term.Type()) + if term.Tilde() { + part = "~" + part + } + parts[i] = part + } + // Sort the union so that "string|int" matches "int|string". + sort.Strings(parts) + return strings.Join(parts, `|`) +} + +// Interface returns the filter part for an interface type or +// an interface for a type parameter constraint. +func (gen *filterGen) Interface(inter *types.Interface) string { + // Collect all method constraints with method names and signatures. + parts := make([]string, inter.NumMethods()) + for i := range parts { + fn := inter.Method(i) + parts[i] = fn.Id() + gen.Signature(fn.Type().(*types.Signature)) + } + // Add any union constraints. 
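+	// For example, per the tests above, a method-only constraint is expected to
+	// render as `interface{ Firth()(string, error); WakkaWakka(string) bool }`,
+	// while a constraint that is only a union renders as the sorted union itself.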
+	for i := 0; i < inter.NumEmbeddeds(); i++ {
+		if union, ok := inter.EmbeddedType(i).(*types.Union); ok {
+			parts = append(parts, gen.Union(union))
+		}
+	}
+	// Sort the parts of the interface since the order doesn't matter.
+	// e.g. `interface { a(); b() }` is the same as `interface { b(); a() }`.
+	sort.Strings(parts)
+
+	if len(parts) == 0 {
+		return `any`
+	}
+	if inter.NumMethods() == 0 && len(parts) == 1 {
+		return parts[0] // single constraint union, i.e. `bool|~int|string`
+	}
+	return `interface{ ` + strings.Join(parts, `; `) + ` }`
+}
+
+// Struct returns the filter part for a struct type.
+func (gen *filterGen) Struct(s *types.Struct) string {
+	if s.NumFields() == 0 {
+		return `struct{}`
+	}
+	parts := make([]string, s.NumFields())
+	for i := range parts {
+		f := s.Field(i)
+		// The field name and order are required to be part of the filter since
+		// struct matching relies on field names too. Tags are not needed.
+		// See https://go.dev/ref/spec#Conversions
+		parts[i] = f.Id() + ` ` + gen.Type(f.Type())
+	}
+	return `struct{ ` + strings.Join(parts, `; `) + ` }`
+}
+
+// TypeParam returns the filter part for a type parameter.
+// If there is an argument remap, it will use the remapped type
+// so long as it doesn't map to itself.
+func (gen *filterGen) TypeParam(t *types.TypeParam) string {
+	index := t.Index()
+	if index >= 0 && index < len(gen.argTypeRemap) {
+		if inst := gen.argTypeRemap[index]; inst != t {
+			return gen.Type(inst)
+		}
+	}
+	if t.Constraint() == nil {
+		return `any`
+	}
+	return gen.Type(t.Constraint())
+}
diff --git a/compiler/internal/dce/info.go b/compiler/internal/dce/info.go
new file mode 100644
index 000000000..6a45e9ef3
--- /dev/null
+++ b/compiler/internal/dce/info.go
@@ -0,0 +1,157 @@
+package dce
+
+import (
+	"bytes"
+	"encoding/gob"
+	"fmt"
+	"go/types"
+	"sort"
+	"strings"
+)
+
+// Info contains information used by the dead-code elimination (DCE) logic to
+// determine whether a declaration is alive or dead.
+type Info struct {
+	// alive indicates if the declaration is marked as alive
+	// and will not be eliminated.
+	alive bool
+
+	// objectFilter is the primary DCE name for a declaration.
+	// This will be the variable, function, or type identifier.
+	// For methods it is the receiver type identifier.
+	// If empty, the declaration is assumed to be alive.
+	objectFilter string
+
+	// methodFilter is the secondary DCE name for a declaration.
+	// This will be empty if objectFilter is empty.
+	// This will be set to a qualified method name if the objectFilter
+	// cannot determine on its own whether the declaration is alive.
+	// See ./README.md for more information.
+	methodFilter string
+
+	// Set of fully qualified (including package path) DCE symbol
+	// and/or method names that this DCE declaration depends on.
+	deps map[string]struct{}
+}
+
+// String gets a human-readable representation of the DCE info.
+func (d *Info) String() string {
+	tags := ``
+	if d.alive {
+		tags += `[alive] `
+	}
+	if d.unnamed() {
+		tags += `[unnamed] `
+	}
+	names := []string{}
+	if len(d.objectFilter) > 0 {
+		names = append(names, d.objectFilter+` `)
+	}
+	if len(d.methodFilter) > 0 {
+		names = append(names, d.methodFilter+` `)
+	}
+	return tags + strings.Join(names, `& `) + `-> [` + strings.Join(d.getDeps(), `, `) + `]`
+}
+
+// unnamed returns true if SetName has not been called for this declaration.
+// This indicates that the DCE info is not initialized.
+func (d *Info) unnamed() bool {
+	return d.objectFilter == `` && d.methodFilter == ``
+}
+
+// isAlive returns true if the declaration is marked as alive.
+//
+// Returns true if SetAsAlive was called on this declaration or
+// if SetName was not called, meaning the DCE info is not initialized.
+func (d *Info) isAlive() bool {
+	return d.alive || d.unnamed()
+}
+
+// SetAsAlive marks the declaration as alive, meaning it will not be eliminated.
+//
+// This should be called for entry points (such as the main() and init()
+// functions) and for variable initializers that have side effects, so that
+// they are considered live.
+func (d *Info) SetAsAlive() {
+	d.alive = true
+}
+
+// SetName sets the name used by DCE to represent the declaration
+// this DCE info is attached to.
+//
+// The given optional type arguments are used when the object is a
+// function with type parameters or anytime the object doesn't carry them.
+// If not given, this attempts to get the type arguments from the object.
+func (d *Info) SetName(o types.Object, tArgs ...types.Type) {
+	if !d.unnamed() {
+		panic(fmt.Errorf(`may only set the name once for %s`, d.String()))
+	}
+
+	// Determine name(s) for DCE.
+	d.objectFilter, d.methodFilter = getFilters(o, tArgs)
+}
+
+// addDep adds a declaration dependency used by DCE
+// to the declaration this DCE info is attached to.
+func (d *Info) addDep(o types.Object, tArgs []types.Type) {
+	objectFilter, methodFilter := getFilters(o, tArgs)
+	d.addDepName(objectFilter)
+	d.addDepName(methodFilter)
+}
+
+// addDepName adds a declaration dependency by name.
+func (d *Info) addDepName(depName string) {
+	if len(depName) > 0 {
+		if d.deps == nil {
+			d.deps = make(map[string]struct{})
+		}
+		d.deps[depName] = struct{}{}
+	}
+}
+
+// getDeps gets the dependencies for the declaration sorted by name.
+func (id *Info) getDeps() []string {
+	deps := make([]string, len(id.deps))
+	i := 0
+	for dep := range id.deps {
+		deps[i] = dep
+		i++
+	}
+	sort.Strings(deps)
+	return deps
+}
+
+type serializableInfo struct {
+	Alive        bool
+	ObjectFilter string
+	MethodFilter string
+	Deps         []string
+}
+
+func (id *Info) GobEncode() ([]byte, error) {
+	si := serializableInfo{
+		Alive:        id.alive,
+		ObjectFilter: id.objectFilter,
+		MethodFilter: id.methodFilter,
+		Deps:         id.getDeps(),
+	}
+
+	buf := &bytes.Buffer{}
+	err := gob.NewEncoder(buf).Encode(si)
+	return buf.Bytes(), err
+}
+
+func (id *Info) GobDecode(data []byte) error {
+	var si serializableInfo
+	if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&si); err != nil {
+		return err
+	}
+
+	id.alive = si.Alive
+	id.objectFilter = si.ObjectFilter
+	id.methodFilter = si.MethodFilter
+	id.deps = make(map[string]struct{}, len(si.Deps))
+	for _, dep := range si.Deps {
+		id.deps[dep] = struct{}{}
+	}
+	return nil
+}
diff --git a/compiler/internal/dce/selector.go b/compiler/internal/dce/selector.go
new file mode 100644
index 000000000..3dff49028
--- /dev/null
+++ b/compiler/internal/dce/selector.go
@@ -0,0 +1,93 @@
+package dce
+
+// DeclConstraint is a type constraint for any code declaration that has
+// dead-code elimination (DCE) information attached to it and will be
+// used in a set.
+type DeclConstraint interface {
+	Decl
+	comparable
+}
+
+// Selector gathers all declarations that are still alive after dead-code elimination.
+type Selector[D DeclConstraint] struct {
+	byFilter map[string][]*declInfo[D]
+
+	// A queue of live decls to find other live decls.
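+	// Decls are popped from this queue one at a time; each popped decl's
+	// dependency names clear matching filters in byFilter, and any decl whose
+	// filters are all cleared is queued here in turn until the queue drains
+	// (see AliveDecls).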
+ pendingDecls []D +} + +type declInfo[D DeclConstraint] struct { + decl D + objectFilter string + methodFilter string +} + +// Include will add a new declaration to be checked as alive or not. +func (s *Selector[D]) Include(decl D, implementsLink bool) { + if s.byFilter == nil { + s.byFilter = make(map[string][]*declInfo[D]) + } + + dce := decl.Dce() + + if dce.isAlive() { + s.pendingDecls = append(s.pendingDecls, decl) + return + } + + if implementsLink { + s.pendingDecls = append(s.pendingDecls, decl) + } + + info := &declInfo[D]{decl: decl} + + if dce.objectFilter != `` { + info.objectFilter = dce.objectFilter + s.byFilter[info.objectFilter] = append(s.byFilter[info.objectFilter], info) + } + + if dce.methodFilter != `` { + info.methodFilter = dce.methodFilter + s.byFilter[info.methodFilter] = append(s.byFilter[info.methodFilter], info) + } +} + +func (s *Selector[D]) popPending() D { + max := len(s.pendingDecls) - 1 + d := s.pendingDecls[max] + s.pendingDecls = s.pendingDecls[:max] + return d +} + +// AliveDecls returns a set of declarations that are still alive +// after dead-code elimination. +// This should only be called once all declarations have been included. +func (s *Selector[D]) AliveDecls() map[D]struct{} { + dceSelection := make(map[D]struct{}) // Known live decls. + for len(s.pendingDecls) != 0 { + d := s.popPending() + dce := d.Dce() + + dceSelection[d] = struct{}{} // Mark the decl as live. + + // Consider all decls the current one is known to depend on and possible add + // them to the live queue. + for _, dep := range dce.getDeps() { + if infos, ok := s.byFilter[dep]; ok { + delete(s.byFilter, dep) + for _, info := range infos { + if info.objectFilter == dep { + info.objectFilter = `` + } + if info.methodFilter == dep { + info.methodFilter = `` + } + if info.objectFilter == `` && info.methodFilter == `` { + s.pendingDecls = append(s.pendingDecls, info.decl) + } + } + } + } + } + return dceSelection +} diff --git a/compiler/internal/symbol/symbol.go b/compiler/internal/symbol/symbol.go new file mode 100644 index 000000000..d460ea86d --- /dev/null +++ b/compiler/internal/symbol/symbol.go @@ -0,0 +1,65 @@ +package symbol + +import ( + "go/types" + "strings" +) + +// Name uniquely identifies a named symbol within a program. +// +// This is a logical equivalent of a symbol name used by traditional linkers. +// The following properties should hold true: +// +// - Each named symbol within a program has a unique Name. +// - Similarly named methods of different types will have different symbol names. +// - The string representation is opaque and should not be attempted to reversed +// to a struct form. +type Name struct { + PkgPath string // Full package import path. + Name string // Symbol name. +} + +// New constructs SymName for a given named symbol. +func New(o types.Object) Name { + pkgPath := `_` + if pkg := o.Pkg(); pkg != nil { + pkgPath = pkg.Path() + } + + if fun, ok := o.(*types.Func); ok { + sig := fun.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + // Special case: disambiguate names for different types' methods. + typ := recv.Type() + if ptr, ok := typ.(*types.Pointer); ok { + return Name{ + PkgPath: pkgPath, + Name: "(*" + ptr.Elem().(*types.Named).Obj().Name() + ")." + o.Name(), + } + } + return Name{ + PkgPath: pkgPath, + Name: typ.(*types.Named).Obj().Name() + "." + o.Name(), + } + } + } + return Name{ + PkgPath: pkgPath, + Name: o.Name(), + } +} + +func (n Name) String() string { return n.PkgPath + "." 
+ n.Name }
+
+func (n Name) IsMethod() (recv string, method string, ok bool) {
+	pos := strings.IndexByte(n.Name, '.')
+	if pos == -1 {
+		return
+	}
+	recv, method, ok = n.Name[:pos], n.Name[pos+1:], true
+	size := len(recv)
+	if size > 2 && recv[0] == '(' && recv[size-1] == ')' {
+		recv = recv[1 : size-1]
+	}
+	return
+}
diff --git a/compiler/internal/symbol/symbol_test.go b/compiler/internal/symbol/symbol_test.go
new file mode 100644
index 000000000..778e3b1e0
--- /dev/null
+++ b/compiler/internal/symbol/symbol_test.go
@@ -0,0 +1,53 @@
+package symbol
+
+import (
+	"go/types"
+	"testing"
+
+	"github.com/gopherjs/gopherjs/internal/srctesting"
+)
+
+func TestName(t *testing.T) {
+	const src = `package testcase
+
+	func AFunction() {}
+	type AType struct {}
+	func (AType) AMethod() {}
+	func (*AType) APointerMethod() {}
+	var AVariable int32
+	`
+
+	f := srctesting.New(t)
+	_, pkg := f.Check("pkg/test", f.Parse("test.go", src))
+
+	tests := []struct {
+		obj  types.Object
+		want Name
+	}{
+		{
+			obj:  pkg.Scope().Lookup("AFunction"),
+			want: Name{PkgPath: "pkg/test", Name: "AFunction"},
+		}, {
+			obj:  pkg.Scope().Lookup("AType"),
+			want: Name{PkgPath: "pkg/test", Name: "AType"},
+		}, {
+			obj:  types.NewMethodSet(pkg.Scope().Lookup("AType").Type()).Lookup(pkg, "AMethod").Obj(),
+			want: Name{PkgPath: "pkg/test", Name: "AType.AMethod"},
+		}, {
+			obj:  types.NewMethodSet(types.NewPointer(pkg.Scope().Lookup("AType").Type())).Lookup(pkg, "APointerMethod").Obj(),
+			want: Name{PkgPath: "pkg/test", Name: "(*AType).APointerMethod"},
+		}, {
+			obj:  pkg.Scope().Lookup("AVariable"),
+			want: Name{PkgPath: "pkg/test", Name: "AVariable"},
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.obj.Name(), func(t *testing.T) {
+			got := New(test.obj)
+			if got != test.want {
+				t.Errorf("New(%q) returned %#v, want: %#v", test.obj.Name(), got, test.want)
+			}
+		})
+	}
+}
diff --git a/compiler/internal/typeparams/collect.go b/compiler/internal/typeparams/collect.go
new file mode 100644
index 000000000..940690e83
--- /dev/null
+++ b/compiler/internal/typeparams/collect.go
@@ -0,0 +1,394 @@
+package typeparams
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+	"strings"
+
+	"github.com/gopherjs/gopherjs/compiler/typesutil"
+	"github.com/gopherjs/gopherjs/internal/govendor/subst"
+)
+
+// Resolver translates types defined in terms of type parameters into concrete
+// types, given a mapping from type params to type arguments.
+type Resolver struct {
+	tParams *types.TypeParamList
+	tArgs   []types.Type
+	parent  *Resolver
+
+	// subster is the substitution helper that will perform the actual
+	// substitutions. This may be nil when there are no substitutions, but
+	// it is still usable when nil.
+	subster *subst.Subster
+	selMemo map[typesutil.Selection]typesutil.Selection
+}
+
+// NewResolver creates a new Resolver with tParams entries mapping to tArgs
+// entries with the same index.
+func NewResolver(tc *types.Context, tParams *types.TypeParamList, tArgs []types.Type, parent *Resolver) *Resolver {
+	r := &Resolver{
+		tParams: tParams,
+		tArgs:   tArgs,
+		parent:  parent,
+		subster: subst.New(tc, tParams, tArgs),
+		selMemo: map[typesutil.Selection]typesutil.Selection{},
+	}
+	return r
+}
+
+// TypeParams is the list of type parameters that this resolver
+// (not any parent) will substitute.
+func (r *Resolver) TypeParams() *types.TypeParamList {
+	if r == nil {
+		return nil
+	}
+	return r.tParams
+}
+
+// TypeArgs is the list of type arguments that this resolver
+// (not any parent) will resolve to.
+func (r *Resolver) TypeArgs() []types.Type {
+	if r == nil {
+		return nil
+	}
+	return r.tArgs
+}
+
+// Parent is the resolver for the function or method that this resolver
+// is nested in. This may be nil if the context for this resolver is not
+// nested in another generic function or method.
+func (r *Resolver) Parent() *Resolver {
+	if r == nil {
+		return nil
+	}
+	return r.parent
+}
+
+// Substitute replaces references to type params in the provided type definition
+// with the corresponding concrete types.
+func (r *Resolver) Substitute(typ types.Type) types.Type {
+	if r == nil || typ == nil {
+		return typ // No substitutions to be made.
+	}
+	typ = r.subster.Type(typ)
+	typ = r.parent.Substitute(typ)
+	return typ
+}
+
+// SubstituteAll is the same as Substitute, but accepts a TypeList and returns
+// the substitution results as a slice in the same order.
+func (r *Resolver) SubstituteAll(list *types.TypeList) []types.Type {
+	result := make([]types.Type, list.Len())
+	for i := range result {
+		result[i] = r.Substitute(list.At(i))
+	}
+	return result
+}
+
+// SubstituteSelection replaces a method or field selection on a generic type
+// defined in terms of type parameters with a method selection on a concrete
+// instantiation of the type.
+func (r *Resolver) SubstituteSelection(sel typesutil.Selection) typesutil.Selection {
+	if r == nil || sel == nil {
+		return sel // No substitutions to be made.
+	}
+	if concrete, ok := r.selMemo[sel]; ok {
+		return concrete
+	}
+
+	switch sel.Kind() {
+	case types.MethodExpr, types.MethodVal, types.FieldVal:
+		recv := r.Substitute(sel.Recv())
+		if types.Identical(recv, sel.Recv()) {
+			return sel // Non-generic receiver, no substitution necessary.
+		}
+
+		// Look up the method on the instantiated receiver.
+		pkg := sel.Obj().Pkg()
+		obj, index, _ := types.LookupFieldOrMethod(recv, true, pkg, sel.Obj().Name())
+		if obj == nil {
+			panic(fmt.Errorf("failed to lookup field %q in type %v", sel.Obj().Name(), recv))
+		}
+		typ := obj.Type()
+
+		if sel.Kind() == types.MethodExpr {
+			typ = typesutil.RecvAsFirstArg(typ.(*types.Signature))
+		}
+		concrete := typesutil.NewSelection(sel.Kind(), recv, index, obj, typ)
+		r.selMemo[sel] = concrete
+		return concrete
+	default:
+		panic(fmt.Errorf("unexpected selection kind %v: %v", sel.Kind(), sel))
+	}
+}
+
+// String gets a string representation of the resolver for debugging.
+func (r *Resolver) String() string {
+	if r == nil {
+		return `{}`
+	}
+
+	parts := make([]string, 0, len(r.tArgs))
+	for i, ta := range r.tArgs {
+		parts = append(parts, fmt.Sprintf("%s->%s", r.tParams.At(i), ta))
+	}
+
+	nestStr := ``
+	if r.parent != nil {
+		nestStr = r.parent.String() + `:`
+	}
+	return nestStr + `{` + strings.Join(parts, `, `) + `}`
+}
+
+// visitor implements ast.Visitor and collects instances of generic types and
+// functions into an InstanceSet.
+//
+// When traversing an AST subtree corresponding to a generic type, method, or
+// function, a Resolver must be provided that maps the type parameters to
+// concrete types.
+type visitor struct {
+	instances *PackageInstanceSets
+	resolver  *Resolver
+	info      *types.Info
+	tNest     []types.Type // The type arguments for a nested context.
+}
+
+var _ ast.Visitor = &visitor{}
+
+func (c *visitor) Visit(n ast.Node) ast.Visitor {
+	if ident, ok := n.(*ast.Ident); ok {
+		c.visitIdent(ident)
+	}
+	return c
+}
+
+func (c *visitor) visitIdent(ident *ast.Ident) {
+	if inst, ok := c.info.Instances[ident]; ok {
+		// Found the use of a generic type or function.
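+		// For example, in the tests the identifier `typ` in an expression like
+		// `typ[int, A]{}` has an Instances entry carrying the type arguments [int, A].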
+ c.visitInstance(ident, inst) + } + + if len(c.resolver.TypeArgs()) > 0 { + if obj, ok := c.info.Defs[ident]; ok && obj != nil { + // Found instance of a type defined inside a generic context. + c.visitNestedType(obj) + } + } +} + +func (c *visitor) visitInstance(ident *ast.Ident, inst types.Instance) { + obj := c.info.Uses[ident] + tArgs := inst.TypeArgs + + // For types embedded in structs, the object the identifier resolves to is a + // *types.Var representing the implicitly declared struct field. However, the + // instance relates to the *types.TypeName behind the field type, which we + // obtain here. + typ := obj.Type() + if ptr, ok := typ.(*types.Pointer); ok { + typ = ptr.Elem() + } + if t, ok := typ.(*types.Named); ok { + obj = t.Obj() + } + + // If the object is defined in the same scope as the instance, + // then we apply the current nested type arguments. + var tNest []types.Type + if obj.Parent().Contains(ident.Pos()) { + tNest = c.tNest + } + + c.addInstance(obj, tArgs, tNest) +} + +func (c *visitor) visitNestedType(obj types.Object) { + if _, ok := obj.(*types.TypeName); !ok { + // Found a variable or function, not a type, so skip it. + return + } + + typ := obj.Type() + if ptr, ok := typ.(*types.Pointer); ok { + typ = ptr.Elem() + } + + t, ok := typ.(*types.Named) + if !ok || t.TypeParams().Len() > 0 { + // Found a generic type or an unnamed type (e.g. type parameter). + // Don't add generic types yet because they + // will be added when we find an instance of them. + return + } + + c.addInstance(obj, nil, c.resolver.TypeArgs()) +} + +func (c *visitor) addInstance(obj types.Object, tArgList *types.TypeList, tNest []types.Type) { + tArgs := c.resolver.SubstituteAll(tArgList) + if isGeneric(tArgs...) { + // Skip any instances that still have type parameters in them after + // substitution. This occurs when a type is defined while nested + // in a generic context and is not fully instantiated yet. + // We need to wait until we find a full instantiation of the type. + return + } + + c.instances.Add(Instance{ + Object: obj, + TArgs: tArgs, + TNest: tNest, + }) + + if t, ok := obj.Type().(*types.Named); ok { + for i := 0; i < t.NumMethods(); i++ { + method := t.Method(i) + c.instances.Add(Instance{ + Object: method.Origin(), + TArgs: tArgs, + TNest: tNest, + }) + } + } +} + +// seedVisitor implements ast.Visitor that collects information necessary to +// kickstart generic instantiation discovery. +// +// It serves double duty: +// - Builds a map from types.Object instances representing generic types, +// methods and functions to AST nodes that define them. +// - Collects an initial set of generic instantiations in the non-generic code. +type seedVisitor struct { + visitor + objMap map[types.Object]ast.Node + mapOnly bool // Only build up objMap, ignore any instances. +} + +var _ ast.Visitor = &seedVisitor{} + +func (c *seedVisitor) Visit(n ast.Node) ast.Visitor { + // Generic functions, methods and types require type arguments to scan for + // generic instantiations, remember their node for later and do not descend + // further. 
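+	// For example, in the tests a generic declaration such as
+	// `func fun[U any, W any](x U, y W) {}` is only recorded in objMap, while a
+	// use in non-generic code such as `fun(1, A{})` or `var b typ[int]` is
+	// collected as a seed instance.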
+ switch n := n.(type) { + case *ast.FuncDecl: + obj := c.info.Defs[n.Name] + sig := obj.Type().(*types.Signature) + if sig.TypeParams().Len() != 0 || sig.RecvTypeParams().Len() != 0 { + c.objMap[obj] = n + return &seedVisitor{ + visitor: c.visitor, + objMap: c.objMap, + mapOnly: true, + } + } + case *ast.TypeSpec: + obj := c.info.Defs[n.Name] + named, ok := obj.Type().(*types.Named) + if !ok { + break + } + if named.TypeParams().Len() != 0 && named.TypeArgs().Len() == 0 { + c.objMap[obj] = n + return nil + } + } + + if !c.mapOnly { + // Otherwise check for fully defined instantiations and descend further into + // the AST tree. + c.visitor.Visit(n) + } + return c +} + +// Collector scans type-checked AST tree and adds discovered generic type and +// function instances to the InstanceSet. +// +// Collector will scan non-generic code for any instantiations of generic types +// or functions and add them to the InstanceSet. Then it will scan generic types +// and function with discovered sets of type arguments for more instantiations, +// until no new ones are discovered. +// +// InstanceSet may contain unprocessed instances of generic types and functions, +// which will be also scanned, for example found in depending packages. +// +// Note that instances of generic type methods are automatically added to the +// set whenever their receiver type instance is encountered. +type Collector struct { + TContext *types.Context + Info *types.Info + Instances *PackageInstanceSets +} + +// Scan package files for generic instances. +func (c *Collector) Scan(pkg *types.Package, files ...*ast.File) { + if c.Info.Instances == nil || c.Info.Defs == nil { + panic(fmt.Errorf("types.Info must have Instances and Defs populated")) + } + objMap := map[types.Object]ast.Node{} + + // Collect instances of generic objects in non-generic code in the package and + // add then to the existing InstanceSet. + sc := seedVisitor{ + visitor: visitor{ + instances: c.Instances, + resolver: nil, + info: c.Info, + }, + objMap: objMap, + } + for _, file := range files { + ast.Walk(&sc, file) + } + + for iset := c.Instances.Pkg(pkg); !iset.exhausted(); { + inst, _ := iset.next() + + switch typ := inst.Object.Type().(type) { + case *types.Signature: + c.scanSignature(inst, typ, objMap) + + case *types.Named: + c.scanNamed(inst, typ, objMap) + } + } +} + +func (c *Collector) scanSignature(inst Instance, typ *types.Signature, objMap map[types.Object]ast.Node) { + tParams := SignatureTypeParams(typ) + v := visitor{ + instances: c.Instances, + resolver: NewResolver(c.TContext, tParams, inst.TArgs, nil), + info: c.Info, + tNest: inst.TArgs, + } + ast.Walk(&v, objMap[inst.Object]) +} + +func (c *Collector) scanNamed(inst Instance, typ *types.Named, objMap map[types.Object]ast.Node) { + obj := typ.Obj() + node := objMap[obj] + if node == nil { + // Types without an entry in objMap are concrete types + // that are defined in a generic context. Skip them. 
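+		// For example, the nested type `x` declared inside the generic `entry2`
+		// in the tests is collected as an instance but has no objMap entry; its
+		// body was already walked while scanning the enclosing generic function.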
+ return + } + + var nestResolver *Resolver + if len(inst.TNest) > 0 { + fn := FindNestingFunc(inst.Object) + tp := SignatureTypeParams(fn.Type().(*types.Signature)) + nestResolver = NewResolver(c.TContext, tp, inst.TNest, nil) + } + + v := visitor{ + instances: c.Instances, + resolver: NewResolver(c.TContext, typ.TypeParams(), inst.TArgs, nestResolver), + info: c.Info, + tNest: inst.TNest, + } + ast.Walk(&v, node) +} diff --git a/compiler/internal/typeparams/collect_test.go b/compiler/internal/typeparams/collect_test.go new file mode 100644 index 000000000..6864e5ead --- /dev/null +++ b/compiler/internal/typeparams/collect_test.go @@ -0,0 +1,879 @@ +package typeparams + +import ( + "go/ast" + "go/token" + "go/types" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/gopherjs/gopherjs/internal/srctesting" + "golang.org/x/tools/go/ast/astutil" +) + +func TestVisitor(t *testing.T) { + // This test verifies that instance collector is able to discover + // instantiations of generic types and functions in all possible contexts. + const src = `package testcase + + type A struct{} + type B struct{} + type C struct{} + type D struct{} + type E struct{} + type F struct{} + type G struct{} + + type typ[T any, V any] []T + func (t *typ[T, V]) method(x T) {} + func fun[U any, W any](x U, y W) {} + + func entry1(arg typ[int8, A]) (result typ[int16, A]) { + fun(1, A{}) + fun[int8, A](1, A{}) + println(fun[int16, A]) + + t := typ[int, A]{} + t.method(0) + (*typ[int32, A]).method(nil, 0) + + type x struct{ T []typ[int64, A] } + type y[X any] struct{ T []typ[A, X] } + _ = y[int8]{} + _ = y[A]{} + + return + } + + func entry2[T any](arg typ[int8, T]) (result typ[int16, T]) { + var zeroT T + fun(1, zeroT) + fun[int8, T](1, zeroT) + println(fun[int16, T]) + + t := typ[int, T]{} + t.method(0) + (*typ[int32, T]).method(nil, 0) + + type x struct{ T []typ[int64, T] } + type y[X any] struct{ T []typ[T, X] } + _ = y[int8]{} + _ = y[T]{} + + return + } + + type entry3[T any] struct{ + typ[int, T] + field1 struct { field2 typ[int8, T] } + } + func (e entry3[T]) method(arg typ[int8, T]) (result typ[int16, T]) { + var zeroT T + fun(1, zeroT) + fun[int8, T](1, zeroT) + println(fun[int16, T]) + + t := typ[int, T]{} + t.method(0) + (*typ[int32, T]).method(nil, 0) + + type x struct{ T []typ[int64, T] } + type y[X any] struct{ T []typ[T, X] } + _ = y[int8]{} + _ = y[T]{} + + return + } + + type entry4 struct{ + typ[int, E] + field1 struct { field2 typ[int8, E] } + } + + type entry5 = typ[int, F] + ` + f := srctesting.New(t) + file := f.Parse("test.go", src) + info, pkg := f.Check("pkg/test", file) + + lookupObj := func(name string) types.Object { + return srctesting.LookupObj(pkg, name) + } + lookupType := func(name string) types.Type { return lookupObj(name).Type() } + lookupDecl := func(name string) ast.Node { + obj := lookupObj(name) + path, _ := astutil.PathEnclosingInterval(file, obj.Pos(), obj.Pos()) + for _, n := range path { + switch n.(type) { + case *ast.FuncDecl, *ast.TypeSpec: + return n + } + } + t.Fatalf("Could not find AST node representing %v", obj) + return nil + } + + // Generates a list of instances we expect to discover from functions and + // methods. Sentinel type is a type parameter we use uniquely within one + // context, which allows us to make sure that collection is not being tested + // against a wrong part of AST. + instancesInFunc := func(sentinel types.Type) []Instance { + return []Instance{ + { + // Called with type arguments inferred. 
+ Object: lookupObj("fun"), + TArgs: []types.Type{types.Typ[types.Int], sentinel}, + }, { + // Called with type arguments explicitly specified. + Object: lookupObj("fun"), + TArgs: []types.Type{types.Typ[types.Int8], sentinel}, + }, { + // Passed as an argument. + Object: lookupObj("fun"), + TArgs: []types.Type{types.Typ[types.Int16], sentinel}, + }, { + // Literal expression. + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int], sentinel}, + }, { + // Function argument. + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int8], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int8], sentinel}, + }, { + // Function return type. + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int16], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int16], sentinel}, + }, { + // Method expression. + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int32], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int32], sentinel}, + }, { + // Type decl statement. + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int64], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int64], sentinel}, + }, + } + } + + // Generates a list of instances we expect to discover from type declarations. + // Sentinel type is a type parameter we use uniquely within one context, which + // allows us to make sure that collection is not being tested against a wrong + // part of AST. + instancesInType := func(sentinel types.Type) []Instance { + return []Instance{ + { + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int], sentinel}, + }, { + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int8], sentinel}, + }, { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int8], sentinel}, + }, + } + } + + tests := []struct { + descr string + resolver *Resolver + node ast.Node + want []Instance + }{ + { + descr: "non-generic function", + resolver: nil, + node: lookupDecl("entry1"), + want: append( + instancesInFunc(lookupType("A")), + Instance{ + Object: lookupObj("entry1.y"), + TArgs: []types.Type{types.Typ[types.Int8]}, + }, + Instance{ + Object: lookupObj("entry1.y"), + TArgs: []types.Type{lookupType("A")}, + }, + ), + }, { + descr: "generic function", + resolver: NewResolver( + types.NewContext(), + lookupType("entry2").(*types.Signature).TypeParams(), + []types.Type{lookupType("B")}, + nil, + ), + node: lookupDecl("entry2"), + want: append( + instancesInFunc(lookupType("B")), + Instance{ + Object: lookupObj("entry2.x"), + TNest: []types.Type{lookupType("B")}, + }, + Instance{ + Object: lookupObj("entry1.y"), + TNest: []types.Type{lookupType("B")}, + TArgs: []types.Type{types.Typ[types.Int8]}, + }, + Instance{ + Object: lookupObj("entry2.y"), + TNest: []types.Type{lookupType("B")}, + TArgs: []types.Type{lookupType("B")}, + }, + ), + }, { + descr: "generic method", + resolver: NewResolver( + types.NewContext(), + lookupType("entry3.method").(*types.Signature).RecvTypeParams(), + []types.Type{lookupType("C")}, + nil, + ), + node: lookupDecl("entry3.method"), + want: append( + instancesInFunc(lookupType("C")), + Instance{ + Object: lookupObj("entry3"), + TArgs: 
[]types.Type{lookupType("C")}, + }, + Instance{ + Object: lookupObj("entry3.method"), + TArgs: []types.Type{lookupType("C")}, + }, + Instance{ + Object: lookupObj("entry3.method.x"), + TNest: []types.Type{lookupType("C")}, + }, + Instance{ + Object: lookupObj("entry3.method.y"), + TNest: []types.Type{lookupType("C")}, + TArgs: []types.Type{types.Typ[types.Int8]}, + }, + Instance{ + Object: lookupObj("entry3.method.y"), + TNest: []types.Type{lookupType("C")}, + TArgs: []types.Type{lookupType("C")}, + }, + ), + }, { + descr: "generic type declaration", + resolver: NewResolver( + types.NewContext(), + lookupType("entry3").(*types.Named).TypeParams(), + []types.Type{lookupType("D")}, + nil, + ), + node: lookupDecl("entry3"), + want: instancesInType(lookupType("D")), + }, { + descr: "non-generic type declaration", + resolver: nil, + node: lookupDecl("entry4"), + want: instancesInType(lookupType("E")), + }, { + descr: "non-generic type alias", + resolver: nil, + node: lookupDecl("entry5"), + want: []Instance{ + { + Object: lookupObj("typ"), + TArgs: []types.Type{types.Typ[types.Int], lookupType("F")}, + }, + { + Object: lookupObj("typ.method"), + TArgs: []types.Type{types.Typ[types.Int], lookupType("F")}, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + v := visitor{ + instances: &PackageInstanceSets{}, + resolver: test.resolver, + info: info, + } + if test.resolver != nil { + // Since we know all the tests are for functions and methods, + // set the nested type to the type parameter from the resolver. + v.tNest = test.resolver.tArgs + } + ast.Walk(&v, test.node) + got := v.instances.Pkg(pkg).Values() + if diff := cmp.Diff(test.want, got, instanceOpts()); diff != "" { + t.Errorf("Discovered instance diff (-want,+got):\n%s", diff) + } + }) + } +} + +func TestSeedVisitor(t *testing.T) { + src := `package test + type typ[T any] int + func (t typ[T]) method(arg T) { var x typ[string]; _ = x } + func fun[T any](arg T) { var y typ[string]; _ = y } + + const a typ[int] = 1 + var b typ[int] + type c struct { field typ[int8] } + func (_ c) method() { var _ typ[int16] } + type d = typ[int32] + func e() { var _ typ[int64] } + ` + + f := srctesting.New(t) + file := f.Parse("test.go", src) + info, pkg := f.Check("pkg/test", file) + + sv := seedVisitor{ + visitor: visitor{ + instances: &PackageInstanceSets{}, + resolver: nil, + info: info, + }, + objMap: map[types.Object]ast.Node{}, + } + ast.Walk(&sv, file) + + tInst := func(tArg types.Type) Instance { + return Instance{ + Object: pkg.Scope().Lookup("typ"), + TArgs: []types.Type{tArg}, + } + } + mInst := func(tArg types.Type) Instance { + return Instance{ + Object: srctesting.LookupObj(pkg, "typ.method"), + TArgs: []types.Type{tArg}, + } + } + want := []Instance{ + tInst(types.Typ[types.Int]), + mInst(types.Typ[types.Int]), + tInst(types.Typ[types.Int8]), + mInst(types.Typ[types.Int8]), + tInst(types.Typ[types.Int16]), + mInst(types.Typ[types.Int16]), + tInst(types.Typ[types.Int32]), + mInst(types.Typ[types.Int32]), + tInst(types.Typ[types.Int64]), + mInst(types.Typ[types.Int64]), + } + got := sv.instances.Pkg(pkg).Values() + if diff := cmp.Diff(want, got, instanceOpts()); diff != "" { + t.Errorf("Instances from seedVisitor contain diff (-want,+got):\n%s", diff) + } +} + +func TestCollector(t *testing.T) { + src := `package test + type typ[T any] int + func (t typ[T]) method(arg T) { var _ typ[int]; fun[int8](0) } + func fun[T any](arg T) { + var _ typ[int16] + + type nested[U any] struct{} + _ = nested[T]{} + } 
+ + type ignore = int + + func a() { + var _ typ[int32] + fun[int64](0) + } + ` + + f := srctesting.New(t) + file := f.Parse("test.go", src) + info, pkg := f.Check("pkg/test", file) + + c := Collector{ + TContext: types.NewContext(), + Info: info, + Instances: &PackageInstanceSets{}, + } + c.Scan(pkg, file) + + inst := func(name, tNest, tArg string) Instance { + return Instance{ + Object: srctesting.LookupObj(pkg, name), + TNest: evalTypeArgs(t, f.FileSet, pkg, tNest), + TArgs: evalTypeArgs(t, f.FileSet, pkg, tArg), + } + } + want := []Instance{ + inst(`typ`, ``, `int`), + inst(`typ.method`, ``, `int`), + inst(`fun`, ``, `int8`), + inst(`fun.nested`, `int8`, `int8`), + inst(`typ`, ``, `int16`), + inst(`typ.method`, ``, `int16`), + inst(`typ`, ``, `int32`), + inst(`typ.method`, ``, `int32`), + inst(`fun`, ``, `int64`), + inst(`fun.nested`, `int64`, `int64`), + } + got := c.Instances.Pkg(pkg).Values() + if diff := cmp.Diff(want, got, instanceOpts()); diff != "" { + t.Errorf("Instances from Collector contain diff (-want,+got):\n%s", diff) + } +} + +func TestCollector_MoreNesting(t *testing.T) { + src := `package test + + func fun[T any]() { + type nestedCon struct{ X T } + _ = nestedCon{} + + type nestedGen[U any] struct{ Y T; Z U } + _ = nestedGen[T]{} + _ = nestedGen[int8]{} + + type nestedCover[T any] struct{ W T } + _ = nestedCover[T]{} + _ = nestedCover[int16]{} + } + + func a() { + fun[int32]() + fun[int64]() + } + ` + + f := srctesting.New(t) + file := f.Parse(`test.go`, src) + info, pkg := f.Check(`pkg/test`, file) + + c := Collector{ + TContext: types.NewContext(), + Info: info, + Instances: &PackageInstanceSets{}, + } + c.Scan(pkg, file) + + inst := func(name, tNest, tArg string) Instance { + return Instance{ + Object: srctesting.LookupObj(pkg, name), + TNest: evalTypeArgs(t, f.FileSet, pkg, tNest), + TArgs: evalTypeArgs(t, f.FileSet, pkg, tArg), + } + } + want := []Instance{ + inst(`fun`, ``, `int32`), + inst(`fun`, ``, `int64`), + + inst(`fun.nestedCon`, `int32`, ``), + inst(`fun.nestedCon`, `int64`, ``), + + inst(`fun.nestedGen`, `int32`, `int32`), + inst(`fun.nestedGen`, `int32`, `int8`), + inst(`fun.nestedGen`, `int64`, `int64`), + inst(`fun.nestedGen`, `int64`, `int8`), + + inst(`fun.nestedCover`, `int32`, `int32`), + inst(`fun.nestedCover`, `int32`, `int16`), + inst(`fun.nestedCover`, `int64`, `int64`), + inst(`fun.nestedCover`, `int64`, `int16`), + } + got := c.Instances.Pkg(pkg).Values() + if diff := cmp.Diff(want, got, instanceOpts()); diff != `` { + t.Errorf("Instances from Collector contain diff (-want,+got):\n%s", diff) + } +} + +func TestCollector_NestingWithVars(t *testing.T) { + // This is loosely based off of go1.19.13/test/typeparam/issue47740b.go + // I was getting an error where `Q.print[int;]` was showing up when + // `Q.print` is not in a nesting context with `int` and this helped debug + // it. The problem was that `q` was being treated like a type not a var. 
+ src := `package test + + type Q struct{ v any } + func (q Q) print() { + println(q.v) + } + + func newQ(v any) Q { + return Q{v} + } + + type S[T any] struct{ x T } + func (s S[T]) echo() { + q := newQ(s.x) + q.print() + } + + func a() { + s := S[int]{x: 0} + s.echo() + } + ` + + f := srctesting.New(t) + file := f.Parse(`test.go`, src) + info, pkg := f.Check(`pkg/test`, file) + + c := Collector{ + TContext: types.NewContext(), + Info: info, + Instances: &PackageInstanceSets{}, + } + c.Scan(pkg, file) + + inst := func(name, tNest, tArg string) Instance { + return Instance{ + Object: srctesting.LookupObj(pkg, name), + TNest: evalTypeArgs(t, f.FileSet, pkg, tNest), + TArgs: evalTypeArgs(t, f.FileSet, pkg, tArg), + } + } + want := []Instance{ + inst(`S`, ``, `int`), + inst(`S.echo`, ``, `int`), + } + got := c.Instances.Pkg(pkg).Values() + if diff := cmp.Diff(want, got, instanceOpts()); diff != `` { + t.Errorf("Instances from Collector contain diff (-want,+got):\n%s", diff) + } +} + +func TestCollector_RecursiveTypeParams(t *testing.T) { + // This is based off of part of go1.19.13/test/typeparam/nested.go + src := `package test + func F[A any]() {} + func main() { + type U[_ any] int + type X[A any] U[X[A]] + F[X[int]]() + } + ` + + f := srctesting.New(t) + file := f.Parse(`test.go`, src) + info, pkg := f.Check(`test`, file) + + c := Collector{ + TContext: types.NewContext(), + Info: info, + Instances: &PackageInstanceSets{}, + } + c.Scan(pkg, file) + + tInt := types.Typ[types.Int] + xAny := srctesting.LookupObj(pkg, `main.X`) + xInt, err := types.Instantiate(types.NewContext(), xAny.Type(), []types.Type{tInt}, true) + if err != nil { + t.Fatalf("Failed to instantiate X[int]: %v", err) + } + + want := []Instance{ + { + Object: srctesting.LookupObj(pkg, `F`), + TArgs: []types.Type{xInt}, + }, { + Object: srctesting.LookupObj(pkg, `main.U`), + TArgs: []types.Type{xInt}, + }, { + Object: xAny, + TArgs: []types.Type{tInt}, + }, + } + got := c.Instances.Pkg(pkg).Values() + if diff := cmp.Diff(want, got, instanceOpts()); diff != `` { + t.Errorf("Instances from Collector contain diff (-want,+got):\n%s", diff) + } +} + +func TestCollector_NestedRecursiveTypeParams(t *testing.T) { + t.Skip(`Skipping test due to known issue with nested recursive type parameters.`) + // TODO(grantnelson-wf): This test is failing because the type parameters + // inside of U are not being resolved to concrete types. This is because + // when instantiating X in the collector, we are not resolving the + // nested type of U that is X's type argument. This leave the A in U + // as a type parameter instead of resolving it to string. + + // This is based off of part of go1.19.13/test/typeparam/nested.go + src := `package test + func F[A any]() any { + type U[_ any] struct{ x A } + type X[B any] U[X[B]] + return X[int]{} + } + func main() { + print(F[string]()) + } + ` + + f := srctesting.New(t) + file := f.Parse(`test.go`, src) + info, pkg := f.Check(`test`, file) + + c := Collector{ + TContext: types.NewContext(), + Info: info, + Instances: &PackageInstanceSets{}, + } + c.Scan(pkg, file) + + xAny := srctesting.LookupObj(pkg, `F.X`) + xInt, err := types.Instantiate(types.NewContext(), xAny.Type(), []types.Type{types.Typ[types.Int]}, true) + if err != nil { + t.Fatalf("Failed to instantiate X[int]: %v", err) + } + // TODO(grantnelson-wf): Need to instantiate xInt to replace `A` with `int` in the struct. 
+	if isGeneric(xInt) {
+		t.Errorf("Expected xInt to be non-generic, got %v", xInt.Underlying())
+	}
+
+	want := []Instance{
+		{
+			Object: srctesting.LookupObj(pkg, `F`),
+			TArgs:  []types.Type{types.Typ[types.String]},
+		}, {
+			Object: srctesting.LookupObj(pkg, `F.U`),
+			TNest:  []types.Type{types.Typ[types.String]},
+			TArgs:  []types.Type{xInt},
+		}, {
+			Object: xAny,
+			TNest:  []types.Type{types.Typ[types.String]},
+			TArgs:  []types.Type{types.Typ[types.Int]},
+		},
+	}
+	got := c.Instances.Pkg(pkg).Values()
+	if diff := cmp.Diff(want, got, instanceOpts()); diff != `` {
+		t.Errorf("Instances from Collector contain diff (-want,+got):\n%s", diff)
+	}
+}
+
+func TestCollector_NestedTypeParams(t *testing.T) {
+	t.Skip(`Skipping test due to known issue with nested recursive type parameters.`)
+	// TODO(grantnelson-wf): This test is failing because the type parameters
+	// inside of U are not being resolved to concrete types. This is because
+	// when instantiating T in the collector, we are not resolving the
+	// nested type of U that is T's type argument. This leaves the A in U
+	// as a type parameter instead of resolving it to int.
+
+	// This is based off of part of go1.19.13/test/typeparam/nested.go
+	src := `package test
+	func F[A any]() any {
+		type T[B any] struct{}
+		type U[_ any] struct{ X A }
+		return T[U[A]]{}
+	}
+	func main() {
+		print(F[int]())
+	}
+	`
+
+	f := srctesting.New(t)
+	file := f.Parse(`test.go`, src)
+	info, pkg := f.Check(`test`, file)
+
+	c := Collector{
+		TContext:  types.NewContext(),
+		Info:      info,
+		Instances: &PackageInstanceSets{},
+	}
+	c.Scan(pkg, file)
+
+	uAny := srctesting.LookupObj(pkg, `F.U`)
+	uInt, err := types.Instantiate(types.NewContext(), uAny.Type(), []types.Type{types.Typ[types.Int]}, true)
+	if err != nil {
+		t.Fatalf("Failed to instantiate U[int]: %v", err)
+	}
+	// TODO(grantnelson-wf): Need to instantiate uInt to replace `A` with `int` in the struct.
+ if isGeneric(uInt) { + t.Errorf("Expected uInt to be non-generic, got %v", uInt.Underlying()) + } + + want := []Instance{ + { + Object: srctesting.LookupObj(pkg, `F`), + TArgs: []types.Type{types.Typ[types.Int]}, + }, { + Object: srctesting.LookupObj(pkg, `F.U`), + TNest: []types.Type{types.Typ[types.Int]}, + TArgs: []types.Type{types.Typ[types.Int]}, + }, { + Object: srctesting.LookupObj(pkg, `F.T`), + TNest: []types.Type{types.Typ[types.Int]}, + TArgs: []types.Type{uInt}, + }, + } + got := c.Instances.Pkg(pkg).Values() + if diff := cmp.Diff(want, got, instanceOpts()); diff != `` { + t.Errorf("Instances from Collector contain diff (-want,+got):\n%s", diff) + } +} + +func evalTypeArgs(t *testing.T, fSet *token.FileSet, pkg *types.Package, expr string) []types.Type { + if len(expr) == 0 { + return nil + } + args := strings.Split(expr, ",") + targs := make([]types.Type, 0, len(args)) + for _, astr := range args { + tv, err := types.Eval(fSet, pkg, 0, astr) + if err != nil { + t.Fatalf("Eval(%s) failed: %v", astr, err) + } + targs = append(targs, tv.Type) + } + return targs +} + +func TestCollector_CrossPackage(t *testing.T) { + f := srctesting.New(t) + const src = `package foo + type X[T any] struct {Value T} + + func F[G any](g G) { + x := X[G]{} + println(x) + } + + func DoFoo() { + F(int8(8)) + } + ` + fooFile := f.Parse("foo.go", src) + _, fooPkg := f.Check("pkg/foo", fooFile) + + const src2 = `package bar + import "pkg/foo" + func FProxy[T any](t T) { + foo.F[T](t) + } + func DoBar() { + FProxy(int16(16)) + } + ` + barFile := f.Parse("bar.go", src2) + _, barPkg := f.Check("pkg/bar", barFile) + + c := Collector{ + TContext: types.NewContext(), + Info: f.Info, + Instances: &PackageInstanceSets{}, + } + c.Scan(barPkg, barFile) + c.Scan(fooPkg, fooFile) + + inst := func(pkg *types.Package, name string, tArg types.BasicKind) Instance { + return Instance{ + Object: srctesting.LookupObj(pkg, name), + TArgs: []types.Type{types.Typ[tArg]}, + } + } + + wantFooInstances := []Instance{ + inst(fooPkg, "F", types.Int16), // Found in "pkg/foo". + inst(fooPkg, "F", types.Int8), + inst(fooPkg, "X", types.Int16), // Found due to F[int16] found in "pkg/foo". 
+		inst(fooPkg, "X", types.Int8),
+	}
+	gotFooInstances := c.Instances.Pkg(fooPkg).Values()
+	if diff := cmp.Diff(wantFooInstances, gotFooInstances, instanceOpts()); diff != "" {
+		t.Errorf("Instances from pkg/foo contain diff (-want,+got):\n%s", diff)
+	}
+
+	wantBarInstances := []Instance{
+		inst(barPkg, "FProxy", types.Int16),
+	}
+	gotBarInstances := c.Instances.Pkg(barPkg).Values()
+	if diff := cmp.Diff(wantBarInstances, gotBarInstances, instanceOpts()); diff != "" {
+		t.Errorf("Instances from pkg/bar contain diff (-want,+got):\n%s", diff)
+	}
+}
+
+func TestResolver_SubstituteSelection(t *testing.T) {
+	tests := []struct {
+		descr   string
+		src     string
+		wantObj string
+		wantSig string
+	}{{
+		descr: "type parameter method",
+		src: `package test
+		type stringer interface{ String() string }
+
+		type x struct{}
+		func (_ x) String() string { return "" }
+
+		type g[T stringer] struct{}
+		func (_ g[T]) Method(t T) string {
+			return t.String()
+		}`,
+		wantObj: "func (pkg/test.x).String() string",
+		wantSig: "func() string",
+	}, {
+		descr: "generic receiver type with type parameter",
+		src: `package test
+		type x struct{}
+
+		type g[T any] struct{}
+		func (_ g[T]) Method(t T) string {
+			return g[T]{}.Method(t)
+		}`,
+		wantObj: "func (pkg/test.g[pkg/test.x]).Method(t pkg/test.x) string",
+		wantSig: "func(t pkg/test.x) string",
+	}, {
+		descr: "method expression",
+		src: `package test
+		type x struct{}
+
+		type g[T any] struct{}
+		func (recv g[T]) Method(t T) string {
+			return g[T].Method(recv, t)
+		}`,
+		wantObj: "func (pkg/test.g[pkg/test.x]).Method(t pkg/test.x) string",
+		wantSig: "func(recv pkg/test.g[pkg/test.x], t pkg/test.x) string",
+	}}
+
+	for _, test := range tests {
+		t.Run(test.descr, func(t *testing.T) {
+			f := srctesting.New(t)
+			file := f.Parse("test.go", test.src)
+			info, pkg := f.Check("pkg/test", file)
+
+			method := srctesting.LookupObj(pkg, "g.Method").(*types.Func).Type().(*types.Signature)
+			resolver := NewResolver(nil, method.RecvTypeParams(), []types.Type{srctesting.LookupObj(pkg, "x").Type()}, nil)
+
+			if l := len(info.Selections); l != 1 {
+				t.Fatalf("Got: %d selections. Want: 1", l)
+			}
+			for _, sel := range info.Selections {
+				gotObj := types.ObjectString(resolver.SubstituteSelection(sel).Obj(), nil)
+				if gotObj != test.wantObj {
+					t.Fatalf("Got: resolver.SubstituteSelection().Obj() = %q. Want: %q.", gotObj, test.wantObj)
+				}
+				gotSig := types.TypeString(resolver.SubstituteSelection(sel).Type(), nil)
+				if gotSig != test.wantSig {
+					t.Fatalf("Got: resolver.SubstituteSelection().Type() = %q. Want: %q.", gotSig, test.wantSig)
+				}
+			}
+		})
+	}
+}
diff --git a/compiler/internal/typeparams/instance.go b/compiler/internal/typeparams/instance.go
new file mode 100644
index 000000000..64c67b4b5
--- /dev/null
+++ b/compiler/internal/typeparams/instance.go
@@ -0,0 +1,245 @@
+package typeparams
+
+import (
+	"fmt"
+	"go/types"
+	"strings"
+
+	"github.com/gopherjs/gopherjs/compiler/internal/symbol"
+	"github.com/gopherjs/gopherjs/compiler/typesutil"
+)
+
+// Instance of a generic type or function.
+//
+// Non-generic objects can be represented as an Instance with zero type params;
+// they are instances of themselves.
+type Instance struct {
+	Object types.Object       // Object to be instantiated.
+	TArgs  typesutil.TypeList // Type params to instantiate with.
+
+	// TNest is the list of type params of the function that this object is nested within.
+	// e.g. In `func A[X any]() { type B[Y any] struct {} }` the `X`
+	// from `A` is the context of `B[Y]`, thus creating `B[X;Y]`.
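+	// For example (illustrative): with X instantiated as int and Y as string,
+	// TNest is [int] and TArgs is [string], which TypeParamsString renders as
+	// [int; string].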
+ TNest typesutil.TypeList +} + +// String returns a string representation of the Instance. +// +// Two semantically different instances may have the same string representation +// if the instantiated object or its type arguments shadow other types. +func (i Instance) String() string { + return i.symbolicName() + i.TypeParamsString(`<`, `>`) +} + +// TypeString returns a Go type string representing the instance (suitable for %T verb). +func (i Instance) TypeString() string { + return i.qualifiedName() + i.TypeParamsString(`[`, `]`) +} + +// symbolicName returns a string representation of the instance's name +// including the package name and pointer indicators but +// excluding the type parameters. +func (i Instance) symbolicName() string { + if i.Object == nil { + return `` + } + return symbol.New(i.Object).String() +} + +// qualifiedName returns a string representation of the instance's name +// including the package name but +// excluding the type parameters and pointer indicators. +func (i Instance) qualifiedName() string { + if i.Object == nil { + return `` + } + if i.Object.Pkg() == nil { + return i.Object.Name() + } + return fmt.Sprintf("%s.%s", i.Object.Pkg().Name(), i.Object.Name()) +} + +// TypeParamsString returns part of a Go type string that represents the type +// parameters of the instance including the nesting type parameters, e.g. [X;Y,Z]. +func (i Instance) TypeParamsString(open, close string) string { + hasNest := len(i.TNest) > 0 + hasArgs := len(i.TArgs) > 0 + buf := strings.Builder{} + if hasNest || hasArgs { + buf.WriteString(open) + if hasNest { + buf.WriteString(i.TNest.String()) + buf.WriteRune(';') + if hasArgs { + buf.WriteRune(' ') + } + } + if hasArgs { + buf.WriteString(i.TArgs.String()) + } + buf.WriteString(close) + } + return buf.String() +} + +// IsTrivial returns true if this is an instance of a non-generic object +// and it is not nested in a generic function. +func (i Instance) IsTrivial() bool { + return len(i.TArgs) == 0 && len(i.TNest) == 0 +} + +// Recv returns an instance of the receiver type of a method. +// +// Returns zero value if not a method. +func (i Instance) Recv() Instance { + sig, ok := i.Object.Type().(*types.Signature) + if !ok { + return Instance{} + } + recv := typesutil.RecvType(sig) + if recv == nil { + return Instance{} + } + return Instance{ + Object: recv.Obj(), + TArgs: i.TArgs, + } +} + +// InstanceSet allows collecting and processing unique Instances. +// +// Each Instance may be added to the set any number of times, but it will be +// returned for processing exactly once. Processing order is not specified. +type InstanceSet struct { + values []Instance + unprocessed int // Index in values for the next unprocessed element. + seen InstanceMap[int] // Maps instance to a unique numeric id. +} + +// Add instances to the set. Instances that have been previously added to the +// set won't be requeued for processing regardless of whether they have been +// processed already. +func (iset *InstanceSet) Add(instances ...Instance) *InstanceSet { + for _, inst := range instances { + if iset.seen.Has(inst) { + continue + } + iset.seen.Set(inst, iset.seen.Len()) + iset.values = append(iset.values, inst) + } + return iset +} + +// ID returns a unique numeric identifier assigned to an instance in the set. +// The ID is guaranteed to be unique among all instances of the same object +// within a given program. The ID will be consistent, as long as instances are +// added to the set in the same order. 
+// +// In order to have an ID assigned, the instance must have been previously added +// to the set. +// +// Note: these ids are used in the generated code as keys to the specific +// type/function instantiation in the type/function object. Using this has two +// advantages: +// +// - More compact generated code compared to string keys derived from type args. +// +// - Collision avoidance in case of two different types having the same name due +// to shadowing. +// +// Here's an example where it's very difficult to assign non-colliding +// name-based keys to the two different types T: +// +// func foo() { +// type T int +// { type T string } // Code block creates a new nested scope allowing for shadowing. +// } +func (iset *InstanceSet) ID(inst Instance) int { + id, ok := iset.seen.get(inst) + if !ok { + panic(fmt.Errorf("requesting ID of instance %v that hasn't been added to the set", inst)) + } + return id +} + +// next returns the next Instance to be processed. +// +// If there are no unprocessed instances, the second returned value will be false. +func (iset *InstanceSet) next() (Instance, bool) { + if iset.exhausted() { + return Instance{}, false + } + next := iset.values[iset.unprocessed] + iset.unprocessed++ + return next, true +} + +// exhausted returns true if there are no unprocessed instances in the set. +func (iset *InstanceSet) exhausted() bool { return len(iset.values) <= iset.unprocessed } + +// Values returns instances that are currently in the set. Order is not specified. +func (iset *InstanceSet) Values() []Instance { + return iset.values +} + +// ByObj returns instances grouped by object they belong to. Order is not specified. +func (iset *InstanceSet) ByObj() map[types.Object][]Instance { + result := map[types.Object][]Instance{} + for _, inst := range iset.values { + result[inst.Object] = append(result[inst.Object], inst) + } + return result +} + +// ForObj returns the instances that belong to the given object type. +// Order is not specified. This returns the same values as `ByObj()[obj]`. +func (iset *InstanceSet) ForObj(obj types.Object) []Instance { + result := []Instance{} + for _, inst := range iset.values { + if inst.Object == obj { + result = append(result, inst) + } + } + return result +} + +// ObjHasInstances returns true if there are any instances (either trivial +// or non-trivial) that belong to the given object type, otherwise false. +func (iset *InstanceSet) ObjHasInstances(obj types.Object) bool { + for _, inst := range iset.values { + if inst.Object == obj { + return true + } + } + return false +} + +// PackageInstanceSets stores an InstanceSet for each package in a program, keyed +// by import path. +type PackageInstanceSets map[string]*InstanceSet + +// Pkg returns InstanceSet for objects defined in the given package. +func (i PackageInstanceSets) Pkg(pkg *types.Package) *InstanceSet { + path := pkg.Path() + iset, ok := i[path] + if !ok { + iset = &InstanceSet{} + i[path] = iset + } + return iset +} + +// Add instances to the appropriate package's set. Automatically initialized +// new per-package sets upon a first encounter. +func (i PackageInstanceSets) Add(instances ...Instance) { + for _, inst := range instances { + i.Pkg(inst.Object.Pkg()).Add(inst) + } +} + +// ID returns a unique numeric identifier assigned to an instance in the set. +// +// See: InstanceSet.ID(). 
+func (i PackageInstanceSets) ID(inst Instance) int { + return i.Pkg(inst.Object.Pkg()).ID(inst) +} diff --git a/compiler/internal/typeparams/instance_test.go b/compiler/internal/typeparams/instance_test.go new file mode 100644 index 000000000..9b88c87b5 --- /dev/null +++ b/compiler/internal/typeparams/instance_test.go @@ -0,0 +1,275 @@ +package typeparams + +import ( + "go/types" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/gopherjs/gopherjs/internal/srctesting" + "github.com/gopherjs/gopherjs/internal/testingx" +) + +func instanceOpts() cmp.Options { + return cmp.Options{ + // Instances are represented by their IDs for diffing purposes. + cmp.Transformer("Instance", func(i Instance) string { + return i.String() + }), + // Order of instances in a slice doesn't matter, sort them by ID. + cmpopts.SortSlices(func(a, b Instance) bool { + return a.String() < b.String() + }), + } +} + +func TestInstanceString(t *testing.T) { + const src = `package testcase + + type Ints []int + + type Typ[T any, V any] []T + func (t Typ[T, V]) Method(x T) {} + + type typ[T any, V any] []T + func (t typ[T, V]) method(x T) {} + + func Fun[U any, W any](x, y U) {} + func fun[U any, W any](x, y U) {} + ` + f := srctesting.New(t) + _, pkg := f.Check("pkg/test", f.Parse("test.go", src)) + mustType := testingx.Must[types.Type](t) + + tests := []struct { + descr string + instance Instance + wantStr string + wantTypeString string + }{{ + descr: "exported type", + instance: Instance{ + Object: pkg.Scope().Lookup("Typ"), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.String]}, + }, + wantStr: "pkg/test.Typ", + wantTypeString: "testcase.Typ[int, string]", + }, { + descr: "exported method", + instance: Instance{ + Object: pkg.Scope().Lookup("Typ").Type().(*types.Named).Method(0), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.String]}, + }, + wantStr: "pkg/test.Typ.Method", + }, { + descr: "exported function", + instance: Instance{ + Object: pkg.Scope().Lookup("Fun"), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.String]}, + }, + wantStr: "pkg/test.Fun", + }, { + descr: "unexported type", + instance: Instance{ + Object: pkg.Scope().Lookup("typ"), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.String]}, + }, + wantStr: "pkg/test.typ", + wantTypeString: "testcase.typ[int, string]", + }, { + descr: "unexported method", + instance: Instance{ + Object: pkg.Scope().Lookup("typ").Type().(*types.Named).Method(0), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.String]}, + }, + wantStr: "pkg/test.typ.method", + }, { + descr: "unexported function", + instance: Instance{ + Object: pkg.Scope().Lookup("fun"), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.String]}, + }, + wantStr: "pkg/test.fun", + }, { + descr: "no type params", + instance: Instance{ + Object: pkg.Scope().Lookup("Ints"), + }, + wantStr: "pkg/test.Ints", + wantTypeString: "testcase.Ints", + }, { + descr: "complex parameter type", + instance: Instance{ + Object: pkg.Scope().Lookup("fun"), + TArgs: []types.Type{ + types.NewSlice(types.Typ[types.Int]), + mustType(types.Instantiate(nil, pkg.Scope().Lookup("typ").Type(), []types.Type{ + types.Typ[types.Int], + types.Typ[types.String], + }, true)), + }, + }, + wantStr: "pkg/test.fun<[]int, pkg/test.typ[int, string]>", + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + got := test.instance.String() + if got != test.wantStr { + t.Errorf("Got: instance string %q. 
Want: %q.", got, test.wantStr) + } + if test.wantTypeString != "" { + got = test.instance.TypeString() + if got != test.wantTypeString { + t.Errorf("Got: instance type string %q. Want: %q.", got, test.wantTypeString) + } + } + }) + } +} + +func TestInstanceQueue(t *testing.T) { + const src = `package test + type Typ[T any, V any] []T + func Fun[U any, W any](x, y U) {} + ` + f := srctesting.New(t) + _, pkg := f.Check("pkg/test", f.Parse("test.go", src)) + + i1 := Instance{ + Object: pkg.Scope().Lookup("Typ"), + TArgs: []types.Type{types.Typ[types.String], types.Typ[types.String]}, + } + i2 := Instance{ + Object: pkg.Scope().Lookup("Typ"), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.Int]}, + } + i3 := Instance{ + Object: pkg.Scope().Lookup("Fun"), + TArgs: []types.Type{types.Typ[types.String], types.Typ[types.String]}, + } + + set := InstanceSet{} + set.Add(i1, i2) + + if ex := set.exhausted(); ex { + t.Errorf("Got: set.exhausted() = true. Want: false") + } + + gotValues := set.Values() + wantValues := []Instance{i1, i2} + if diff := cmp.Diff(wantValues, gotValues, instanceOpts()); diff != "" { + t.Errorf("set.Values() returned diff (-want,+got):\n%s", diff) + } + + p1, ok := set.next() + if !ok { + t.Errorf("Got: _, ok := set.next(); ok == false. Want: true.") + } + p2, ok := set.next() + if !ok { + t.Errorf("Got: _, ok := set.next(); ok == false. Want: true.") + } + if ex := set.exhausted(); !ex { + t.Errorf("Got: set.exhausted() = false. Want: true") + } + + _, ok = set.next() + if ok { + t.Errorf("Got: _, ok := set.next(); ok == true. Want: false.") + } + + set.Add(i1) // Has been enqueued before. + if ex := set.exhausted(); !ex { + t.Errorf("Got: set.exhausted() = false. Want: true") + } + + set.Add(i3) + p3, ok := set.next() + if !ok { + t.Errorf("Got: _, ok := set.next(); ok == false. 
Want: true.") + } + + added := []Instance{i1, i2, i3} + processed := []Instance{p1, p2, p3} + + diff := cmp.Diff(added, processed, instanceOpts()) + if diff != "" { + t.Errorf("Processed instances differ from added (-want,+got):\n%s", diff) + } + + gotValues = set.Values() + wantValues = []Instance{i1, i2, i3} + if diff := cmp.Diff(wantValues, gotValues, instanceOpts()); diff != "" { + t.Errorf("set.Values() returned diff (-want,+got):\n%s", diff) + } + + gotByObj := set.ByObj() + wantByObj := map[types.Object][]Instance{ + pkg.Scope().Lookup("Typ"): {i1, i2}, + pkg.Scope().Lookup("Fun"): {i3}, + } + if diff := cmp.Diff(wantByObj, gotByObj, instanceOpts()); diff != "" { + t.Errorf("set.ByObj() returned diff (-want,+got):\n%s", diff) + } +} + +func TestInstancesByPackage(t *testing.T) { + f := srctesting.New(t) + + const src1 = `package foo + type Typ[T any, V any] []T + ` + _, foo := f.Check("pkg/foo", f.Parse("foo.go", src1)) + + const src2 = `package bar + func Fun[U any, W any](x, y U) {} + ` + _, bar := f.Check("pkg/bar", f.Parse("bar.go", src2)) + + i1 := Instance{ + Object: foo.Scope().Lookup("Typ"), + TArgs: []types.Type{types.Typ[types.String], types.Typ[types.String]}, + } + i2 := Instance{ + Object: foo.Scope().Lookup("Typ"), + TArgs: []types.Type{types.Typ[types.Int], types.Typ[types.Int]}, + } + i3 := Instance{ + Object: bar.Scope().Lookup("Fun"), + TArgs: []types.Type{types.Typ[types.String], types.Typ[types.String]}, + } + + t.Run("Add", func(t *testing.T) { + instByPkg := PackageInstanceSets{} + instByPkg.Add(i1, i2, i3) + + gotFooInstances := instByPkg.Pkg(foo).Values() + wantFooInstances := []Instance{i1, i2} + if diff := cmp.Diff(wantFooInstances, gotFooInstances, instanceOpts()); diff != "" { + t.Errorf("instByPkg.Pkg(foo).Values() returned diff (-want,+got):\n%s", diff) + } + + gotValues := instByPkg.Pkg(bar).Values() + wantValues := []Instance{i3} + if diff := cmp.Diff(wantValues, gotValues, instanceOpts()); diff != "" { + t.Errorf("instByPkg.Pkg(bar).Values() returned diff (-want,+got):\n%s", diff) + } + }) + + t.Run("ID", func(t *testing.T) { + instByPkg := PackageInstanceSets{} + instByPkg.Add(i1, i2, i3) + + got := []int{ + instByPkg.ID(i1), + instByPkg.ID(i2), + instByPkg.ID(i3), + } + want := []int{0, 1, 0} + + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("unexpected instance IDs assigned (-want,+got):\n%s", diff) + } + }) +} diff --git a/compiler/internal/typeparams/map.go b/compiler/internal/typeparams/map.go new file mode 100644 index 000000000..7edbdc016 --- /dev/null +++ b/compiler/internal/typeparams/map.go @@ -0,0 +1,205 @@ +package typeparams + +import ( + "fmt" + "go/types" + "sort" + "strings" + + "golang.org/x/tools/go/types/typeutil" +) + +type ( + mapEntry[V any] struct { + key Instance + value V + } + mapBucket[V any] []*mapEntry[V] + mapBuckets[V any] map[uint32]mapBucket[V] +) + +// InstanceMap implements a map-like data structure keyed by instances. +// +// Zero value is an equivalent of an empty map. Methods are not thread-safe. +// +// Since Instance contains a slice and is not comparable, it can not be used as +// a regular map key, but we can compare its fields manually. When comparing +// instance equality, objects are compared by pointer equality, and type +// arguments with types.Identical(). To reduce access complexity, we bucket +// entries by a combined hash of type args. 
+// This type is generally inspired by
+// [golang.org/x/tools/go/types/typeutil#Map].
+type InstanceMap[V any] struct {
+	data   map[types.Object]mapBuckets[V]
+	len    int
+	hasher typeutil.Hasher
+}
+
+// findIndex returns bucket and index of the entry with the given key.
+// If the given key isn't found, an empty bucket and -1 are returned.
+func (im *InstanceMap[V]) findIndex(key Instance) (mapBucket[V], int) {
+	if im != nil && im.data != nil {
+		bucket := im.data[key.Object][typeHash(im.hasher, key.TNest, key.TArgs)]
+		for i, candidate := range bucket {
+			if candidateArgsMatch(key, candidate) {
+				return bucket, i
+			}
+		}
+	}
+	return nil, -1
+}
+
+// get returns the stored value for the provided key and
+// a bool indicating whether the key was present in the map or not.
+func (im *InstanceMap[V]) get(key Instance) (V, bool) {
+	if bucket, i := im.findIndex(key); i >= 0 {
+		return bucket[i].value, true
+	}
+	var zero V
+	return zero, false
+}
+
+// Get returns the stored value for the provided key. If the key is missing from
+// the map, zero value is returned.
+func (im *InstanceMap[V]) Get(key Instance) V {
+	val, _ := im.get(key)
+	return val
+}
+
+// Has returns true if the given key is present in the map.
+func (im *InstanceMap[V]) Has(key Instance) bool {
+	_, ok := im.get(key)
+	return ok
+}
+
+// Set new value for the key in the map. Returns the previous value that was
+// stored in the map, or zero value if the key wasn't present before.
+func (im *InstanceMap[V]) Set(key Instance, value V) V {
+	if im.data == nil {
+		im.data = map[types.Object]mapBuckets[V]{}
+		im.hasher = typeutil.MakeHasher()
+	}
+
+	if _, ok := im.data[key.Object]; !ok {
+		im.data[key.Object] = mapBuckets[V]{}
+	}
+	bucketID := typeHash(im.hasher, key.TNest, key.TArgs)
+
+	// If there is already an identical key in the map, override the entry value.
+	hole := -1
+	bucket := im.data[key.Object][bucketID]
+	for i, candidate := range bucket {
+		if candidate == nil {
+			hole = i
+		} else if candidateArgsMatch(key, candidate) {
+			old := candidate.value
+			candidate.value = value
+			return old
+		}
+	}
+
+	// If there is a hole in the bucket, reuse it.
+	if hole >= 0 {
+		im.data[key.Object][bucketID][hole] = &mapEntry[V]{
+			key:   key,
+			value: value,
+		}
+	} else {
+		// Otherwise append a new entry.
+		im.data[key.Object][bucketID] = append(bucket, &mapEntry[V]{
+			key:   key,
+			value: value,
+		})
+	}
+	im.len++
+	var zero V
+	return zero
+}
+
+// Len returns the number of elements in the map.
+func (im *InstanceMap[V]) Len() int {
+	if im != nil {
+		return im.len
+	}
+	return 0
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+func (im *InstanceMap[V]) Delete(key Instance) bool {
+	if bucket, i := im.findIndex(key); i >= 0 {
+		// We can't compact the bucket as it
+		// would disturb iterators.
+		bucket[i] = nil
+		im.len--
+		return true
+	}
+	return false
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
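+//
+// A minimal usage sketch (the string value type and the inst variable are
+// only illustrative):
+//
+//	m := &InstanceMap[string]{}
+//	m.Set(inst, "example")
+//	m.Iterate(func(key Instance, value string) {
+//		fmt.Printf("%v: %s\n", key, value)
+//	})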
+func (im *InstanceMap[V]) Iterate(f func(key Instance, value V)) { + if im != nil && im.data != nil { + for _, mapBucket := range im.data { + for _, bucket := range mapBucket { + for _, e := range bucket { + if e != nil { + f(e.key, e.value) + } + } + } + } + } +} + +// Keys returns a new slice containing the set of map keys. +// The order is unspecified. +func (im *InstanceMap[V]) Keys() []Instance { + keys := make([]Instance, 0, im.Len()) + im.Iterate(func(key Instance, _ V) { + keys = append(keys, key) + }) + return keys +} + +// String returns a string representation of the map's entries. +// The entries are sorted by string representation of the entry. +func (im *InstanceMap[V]) String() string { + entries := make([]string, 0, im.Len()) + im.Iterate(func(key Instance, value V) { + entries = append(entries, fmt.Sprintf("%v:%v", key, value)) + }) + sort.Strings(entries) + return `{` + strings.Join(entries, `, `) + `}` +} + +// candidateArgsMatch checks if the candidate entry has the same type +// arguments as the given key. +func candidateArgsMatch[V any](key Instance, candidate *mapEntry[V]) bool { + return candidate != nil && + candidate.key.TNest.Equal(key.TNest) && + candidate.key.TArgs.Equal(key.TArgs) +} + +// typeHash returns a combined hash of several types. +// +// Provided hasher is used to compute hashes of individual types, which are +// xor'ed together. Xor preserves bit distribution property, so the combined +// hash should be as good for bucketing, as the original. +func typeHash(hasher typeutil.Hasher, nestTypes, types []types.Type) uint32 { + var hash uint32 + for _, typ := range nestTypes { + hash ^= hasher.Hash(typ) + } + for _, typ := range types { + hash ^= hasher.Hash(typ) + } + return hash +} diff --git a/compiler/internal/typeparams/map_test.go b/compiler/internal/typeparams/map_test.go new file mode 100644 index 000000000..d67a1884d --- /dev/null +++ b/compiler/internal/typeparams/map_test.go @@ -0,0 +1,327 @@ +package typeparams + +import ( + "go/token" + "go/types" + "testing" +) + +func TestInstanceMap(t *testing.T) { + pkg := types.NewPackage(`testPkg`, `testPkg`) + + i1 := Instance{ + Object: types.NewTypeName(token.NoPos, pkg, "i1", nil), + TArgs: []types.Type{ + types.Typ[types.Int], + types.Typ[types.Int8], + }, + } + i1clone := Instance{ + Object: i1.Object, + TArgs: []types.Type{ + types.Typ[types.Int], + types.Typ[types.Int8], + }, + } + + i2 := Instance{ + Object: types.NewTypeName(token.NoPos, pkg, "i2", nil), // Different pointer. + TArgs: []types.Type{ + types.Typ[types.Int], + types.Typ[types.Int8], + }, + } + i3 := Instance{ + Object: i1.Object, + TArgs: []types.Type{ // Different type args, same number. + types.Typ[types.Int], + types.Typ[types.Int], + }, + } + i4 := Instance{ + Object: i1.Object, + TArgs: []types.Type{ // This hash matches i3's hash. + types.Typ[types.String], + types.Typ[types.String], + }, + } + i5 := Instance{ + Object: i1.Object, + TArgs: []types.Type{}, // This hash matches i3's hash. + } + + m := InstanceMap[string]{} + + // Check operations on a missing key. + t.Run("empty", func(t *testing.T) { + if got, want := m.String(), `{}`; got != want { + t.Errorf("Got: empty map string %q. Want: map string %q.", got, want) + } + if got := m.Has(i1); got { + t.Errorf("Got: empty map contains %s. Want: empty map contains nothing.", i1) + } + if got := m.Get(i1); got != "" { + t.Errorf("Got: getting missing key returned %q. Want: zero value.", got) + } + if got := m.Len(); got != 0 { + t.Errorf("Got: empty map length %d. 
Want: 0.", got) + } + if got := m.Set(i1, "abc"); got != "" { + t.Errorf("Got: setting a new key returned old value %q. Want: zero value", got) + } + if got := m.Len(); got != 1 { + t.Errorf("Got: map length %d. Want: 1.", got) + } + if got, want := m.String(), `{testPkg.i1:abc}`; got != want { + t.Errorf("Got: map string %q. Want: map string %q.", got, want) + } + if got, want := m.Keys(), []Instance{i1}; !keysMatch(got, want) { + t.Errorf("Got: map keys %v. Want: [i1].", got) + } + }) + + // Check operations on the existing key. + t.Run("first key", func(t *testing.T) { + if got := m.Set(i1, "def"); got != "abc" { + t.Errorf(`Got: setting an existing key returned old value %q. Want: "abc".`, got) + } + if got := m.Len(); got != 1 { + t.Errorf("Got: map length %d. Want: 1.", got) + } + if got := m.Has(i1); !got { + t.Errorf("Got: set map key is reported as missing. Want: key present.") + } + if got := m.Get(i1); got != "def" { + t.Errorf(`Got: getting set key returned %q. Want: "def"`, got) + } + if got := m.Get(i1clone); got != "def" { + t.Errorf(`Got: getting set key returned %q. Want: "def"`, got) + } + if got, want := m.String(), `{testPkg.i1:def}`; got != want { + t.Errorf("Got: map string %q. Want: map string %q.", got, want) + } + if got, want := m.Keys(), []Instance{i1}; !keysMatch(got, want) { + t.Errorf("Got: map keys %v. Want: [i1].", got) + } + }) + + // Check for key collisions with different object pointer. + t.Run("different object", func(t *testing.T) { + if got := m.Has(i2); got { + t.Errorf("Got: a new key %q is reported as present. Want: not present.", i2) + } + if got := m.Set(i2, "123"); got != "" { + t.Errorf("Got: a new key %q overrode an old value %q. Want: zero value.", i2, got) + } + if got := m.Get(i2); got != "123" { + t.Errorf(`Got: getting set key %q returned: %q. Want: "123"`, i2, got) + } + if got := m.Len(); got != 2 { + t.Errorf("Got: map length %d. Want: 2.", got) + } + }) + + // Check for collisions with different type arguments and different hash. + t.Run("different tArgs", func(t *testing.T) { + if got := m.Has(i3); got { + t.Errorf("Got: a new key %q is reported as present. Want: not present.", i3) + } + if got := m.Set(i3, "456"); got != "" { + t.Errorf("Got: a new key %q overrode an old value %q. Want: zero value.", i3, got) + } + if got := m.Get(i3); got != "456" { + t.Errorf(`Got: getting set key %q returned: %q. Want: "456"`, i3, got) + } + if got := m.Len(); got != 3 { + t.Errorf("Got: map length %d. Want: 3.", got) + } + }) + + // Check for collisions with different type arguments, same hash, count. + t.Run("different tArgs hash", func(t *testing.T) { + if got := m.Has(i4); got { + t.Errorf("Got: a new key %q is reported as present. Want: not present.", i3) + } + if got := m.Set(i4, "789"); got != "" { + t.Errorf("Got: a new key %q overrode an old value %q. Want: zero value.", i3, got) + } + if got := m.Get(i4); got != "789" { + t.Errorf(`Got: getting set key %q returned: %q. Want: "789"`, i3, got) + } + if got := m.Len(); got != 4 { + t.Errorf("Got: map length %d. Want: 4.", got) + } + }) + + // Check for collisions with different type arguments and same hash, but different count. + t.Run("different tArgs count", func(t *testing.T) { + if got := m.Has(i5); got { + t.Errorf("Got: a new key %q is reported as present. Want: not present.", i3) + } + if got := m.Set(i5, "ghi"); got != "" { + t.Errorf("Got: a new key %q overrode an old value %q. 
Want: zero value.", i3, got) + } + if got := m.Get(i5); got != "ghi" { + t.Errorf(`Got: getting set key %q returned: %q. Want: "ghi"`, i3, got) + } + if got := m.Len(); got != 5 { + t.Errorf("Got: map length %d. Want: 5.", got) + } + if got, want := m.String(), `{testPkg.i1:ghi, testPkg.i1:def, testPkg.i1:456, testPkg.i1:789, testPkg.i2:123}`; got != want { + t.Errorf("Got: map string %q. Want: map string %q.", got, want) + } + if got, want := m.Keys(), []Instance{i1, i2, i3, i4, i5}; !keysMatch(got, want) { + t.Errorf("Got: map keys %v. Want: [i1, i2, i3, i4, i5].", got) + } + }) + + // Check an existing entry can be deleted. + t.Run("delete existing", func(t *testing.T) { + if got := m.Delete(i3); !got { + t.Errorf("Got: deleting existing key %q returned not deleted. Want: found and deleted.", i3) + } + if got := m.Len(); got != 4 { + t.Errorf("Got: map length %d. Want: 4.", got) + } + if got := m.Has(i3); got { + t.Errorf("Got: a deleted key %q is reported as present. Want: not present.", i3) + } + if got, want := m.Keys(), []Instance{i1, i2, i4, i5}; !keysMatch(got, want) { + t.Errorf("Got: map keys %v. Want: [i1, i2, i4, i5].", got) + } + }) + + // Check deleting an existing entry has no effect. + t.Run("delete already deleted", func(t *testing.T) { + if got := m.Delete(i3); got { + t.Errorf("Got: deleting not present key %q returned as deleted. Want: not found.", i3) + } + if got := m.Len(); got != 4 { + t.Errorf("Got: map length %d. Want: 4.", got) + } + if got, want := m.Keys(), []Instance{i1, i2, i4, i5}; !keysMatch(got, want) { + t.Errorf("Got: map keys %v. Want: [i1, i2, i4, i5].", got) + } + }) + + // Check adding back a deleted value works (should fill hole in bucket). + t.Run("set deleted key", func(t *testing.T) { + if got := m.Set(i3, "jkl"); got != "" { + t.Errorf("Got: a new key %q overrode an old value %q. Want: zero value.", i3, got) + } + if got := m.Len(); got != 5 { + t.Errorf("Got: map length %d. Want: 5.", got) + } + if got, want := m.Keys(), []Instance{i1, i2, i3, i4, i5}; !keysMatch(got, want) { + t.Errorf("Got: map keys %v. Want: [i1, i2, i3, i4, i5].", got) + } + }) + + // Check deleting while iterating over the map. + t.Run("deleting while iterating", func(t *testing.T) { + notSeen := []Instance{i1, i2, i3, i4, i5} + seen := []Instance{} + kept := []Instance{} + var skipped Instance + m.Iterate(func(key Instance, value string) { + // update seen and not seen + seen = append(seen, key) + i := keyAt(notSeen, key) + if i < 0 { + t.Fatalf(`Got: failed to find current key %q in not seen. Want: it to be not seen yet.`, key) + } + notSeen = append(notSeen[:i], notSeen[i+1:]...) + + if len(seen) == 3 { + // delete the first seen key, the current key, and an unseen key + if got := m.Delete(seen[0]); !got { + t.Errorf("Got: deleting seen key %q returned not deleted. Want: found and deleted.", seen[0]) + } + if got := m.Delete(key); !got { + t.Errorf("Got: deleting current key %q returned not deleted. Want: found and deleted.", key) + } + skipped = notSeen[0] // skipped has not yet been seen so it should not be iterated over + if got := m.Delete(skipped); !got { + t.Errorf("Got: deleting not seen key %q returned not deleted. Want: found and deleted.", skipped) + } + kept = append(kept, seen[1], notSeen[1]) + } + }) + + if got := len(seen); got != 4 { + t.Errorf("Got: seen %d keys. Want: 4.", got) + } + if got := len(notSeen); got != 1 { + t.Errorf("Got: seen %d keys. 
Want: 1.", got) + } + if got := keyAt(notSeen, skipped); got != 0 { + t.Errorf("Got: a deleted unseen key %q was not the skipped key %q. Want: it to be skipped.", notSeen[0], skipped) + } + if got := m.Len(); got != 2 { + t.Errorf("Got: map length %d. Want: 2.", got) + } + if got := m.Keys(); !keysMatch(got, kept) { + t.Errorf("Got: map keys %v did not match kept keys. Want: %v.", got, kept) + } + }) +} + +func TestNilInstanceMap(t *testing.T) { + i1 := Instance{ + Object: types.NewTypeName(token.NoPos, nil, "i1", nil), + TArgs: []types.Type{ + types.Typ[types.Int], + types.Typ[types.Int8], + }, + } + + var m *InstanceMap[string] + if got, want := m.String(), `{}`; got != want { + t.Errorf("Got: nil map string %q. Want: map string %q.", got, want) + } + if got := m.Has(i1); got { + t.Errorf("Got: nil map contains %s. Want: nil map contains nothing.", i1) + } + if got := m.Get(i1); got != "" { + t.Errorf("Got: missing key returned %q. Want: zero value.", got) + } + if got := m.Len(); got != 0 { + t.Errorf("Got: nil map length %d. Want: 0.", got) + } + if got := m.Keys(); len(got) > 0 { + t.Errorf("Got: map keys %v did not match kept keys. Want: [].", got) + } + + // The only thing that a nil map can't safely handle is setting a key. + func() { + defer func() { + recover() + }() + m.Set(i1, "abc") + t.Errorf("Got: setting a new key on nil map did not panic, %s. Want: panic.", m.String()) + }() +} + +func keysMatch(a, b []Instance) bool { + if len(a) != len(b) { + return false + } + found := make([]bool, len(b)) + for _, v := range a { + i := keyAt(b, v) + if i < 0 || found[i] { + return false + } + found[i] = true + } + return true +} + +func keyAt(keys []Instance, target Instance) int { + for i, v := range keys { + if v.Object == target.Object && v.TArgs.Equal(target.TArgs) { + return i + } + } + return -1 +} diff --git a/compiler/internal/typeparams/utils.go b/compiler/internal/typeparams/utils.go new file mode 100644 index 000000000..ea528314e --- /dev/null +++ b/compiler/internal/typeparams/utils.go @@ -0,0 +1,141 @@ +package typeparams + +import ( + "errors" + "fmt" + "go/token" + "go/types" +) + +// SignatureTypeParams returns receiver type params for methods, or function +// type params for standalone functions, or nil for non-generic functions and +// methods. +func SignatureTypeParams(sig *types.Signature) *types.TypeParamList { + if tp := sig.RecvTypeParams(); tp != nil { + return tp + } else if tp := sig.TypeParams(); tp != nil { + return tp + } else { + return nil + } +} + +// FindNestingFunc returns the function or method that the given object +// is nested in, or nil if the object was defined at the package level. +func FindNestingFunc(obj types.Object) *types.Func { + objPos := obj.Pos() + if objPos == token.NoPos { + return nil + } + + scope := obj.Parent() + for scope != nil { + // Iterate over all declarations in the scope. + for _, name := range scope.Names() { + decl := scope.Lookup(name) + if fn, ok := decl.(*types.Func); ok { + // Check if the object's position is within the function's scope. + if objPos >= fn.Pos() && objPos <= fn.Scope().End() { + return fn + } + } + } + scope = scope.Parent() + } + return nil +} + +var ( + errInstantiatesGenerics = errors.New("instantiates generic type or function") + errDefinesGenerics = errors.New("defines generic type or function") +) + +// HasTypeParams returns true if object defines type parameters. 
+//
+// Note: this function does not check whether the object definition actually uses
+// the type parameters, either its own or ones from the outer scope.
+func HasTypeParams(typ types.Type) bool {
+	switch typ := typ.(type) {
+	case *types.Signature:
+		return typ.RecvTypeParams().Len() > 0 || typ.TypeParams().Len() > 0
+	case *types.Named:
+		return typ.TypeParams().Len() > 0
+	default:
+		return false
+	}
+}
+
+// RequiresGenericsSupport returns an error if the type-checked code depends on
+// generics support.
+func RequiresGenericsSupport(info *types.Info) error {
+	for ident := range info.Instances {
+		// Any instantiation means dependency on generics.
+		return fmt.Errorf("%w: %v", errInstantiatesGenerics, info.ObjectOf(ident))
+	}
+
+	for _, obj := range info.Defs {
+		if obj == nil {
+			continue
+		}
+		if HasTypeParams(obj.Type()) {
+			return fmt.Errorf("%w: %v", errDefinesGenerics, obj)
+		}
+	}
+
+	return nil
+}
+
+// isGeneric searches all the given types and their subtypes for a
+// *types.TypeParam. It does not check whether a type could be generic,
+// but whether each instantiation is fully concrete yet.
+//
+// This is useful for checking generic types like `X[B[T]]`, where
+// `X` appears concrete because it is instantiated with the type argument `B[T]`,
+// however the `T` inside `B[T]` is a type parameter, making `X[B[T]]` a generic
+// type since it still requires instantiation to a concrete type, e.g. `X[B[int]]`.
+func isGeneric(typ ...types.Type) bool {
+	var containsTypeParam func(t types.Type) bool
+
+	foreach := func(count int, getter func(index int) types.Type) bool {
+		for i := 0; i < count; i++ {
+			if containsTypeParam(getter(i)) {
+				return true
+			}
+		}
+		return false
+	}
+
+	seen := make(map[types.Type]struct{})
+	containsTypeParam = func(t types.Type) bool {
+		if _, ok := seen[t]; ok {
+			return false
+		}
+		seen[t] = struct{}{}
+
+		switch t := t.(type) {
+		case *types.TypeParam:
+			return true
+		case *types.Named:
+			return t.TypeParams().Len() != t.TypeArgs().Len() ||
+				foreach(t.TypeArgs().Len(), func(i int) types.Type { return t.TypeArgs().At(i) }) ||
+				containsTypeParam(t.Underlying())
+		case *types.Struct:
+			return foreach(t.NumFields(), func(i int) types.Type { return t.Field(i).Type() })
+		case *types.Interface:
+			return foreach(t.NumMethods(), func(i int) types.Type { return t.Method(i).Type() })
+		case *types.Signature:
+			return foreach(t.Params().Len(), func(i int) types.Type { return t.Params().At(i).Type() }) ||
+				foreach(t.Results().Len(), func(i int) types.Type { return t.Results().At(i).Type() })
+		case *types.Map:
+			return containsTypeParam(t.Key()) || containsTypeParam(t.Elem())
+		case interface{ Elem() types.Type }:
+			// Handles *types.Pointer, *types.Slice, *types.Array, *types.Chan.
			return containsTypeParam(t.Elem())
+		default:
+			// Other types (e.g., basic types) do not contain type parameters.
+ return false + } + } + + return foreach(len(typ), func(i int) types.Type { return typ[i] }) +} diff --git a/compiler/internal/typeparams/utils_test.go b/compiler/internal/typeparams/utils_test.go new file mode 100644 index 000000000..dda685273 --- /dev/null +++ b/compiler/internal/typeparams/utils_test.go @@ -0,0 +1,120 @@ +package typeparams + +import ( + "errors" + "go/token" + "go/types" + "testing" + + "github.com/gopherjs/gopherjs/internal/srctesting" +) + +func TestHasTypeParams(t *testing.T) { + pkg := types.NewPackage("test/pkg", "pkg") + empty := types.NewInterfaceType(nil, nil) + tParams := func() []*types.TypeParam { + return []*types.TypeParam{ + types.NewTypeParam(types.NewTypeName(token.NoPos, pkg, "T", types.Typ[types.String]), empty), + } + } + + tests := []struct { + descr string + typ types.Type + want bool + }{{ + descr: "generic function", + typ: types.NewSignatureType(nil, nil, tParams(), nil, nil, false), + want: true, + }, { + descr: "generic method", + typ: types.NewSignatureType(types.NewVar(token.NoPos, pkg, "t", nil), tParams(), nil, nil, nil, false), + want: true, + }, { + descr: "regular function", + typ: types.NewSignatureType(nil, nil, nil, nil, nil, false), + want: false, + }, { + descr: "generic type", + typ: func() types.Type { + typ := types.NewNamed(types.NewTypeName(token.NoPos, pkg, "Typ", nil), types.Typ[types.String], nil) + typ.SetTypeParams(tParams()) + return typ + }(), + want: true, + }, { + descr: "regular named type", + typ: types.NewNamed(types.NewTypeName(token.NoPos, pkg, "Typ", nil), types.Typ[types.String], nil), + want: false, + }, { + descr: "built-in type", + typ: types.Typ[types.String], + want: false, + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + got := HasTypeParams(test.typ) + if got != test.want { + t.Errorf("Got: HasTypeParams(%v) = %v. Want: %v.", test.typ, got, test.want) + } + }) + } +} + +func TestRequiresGenericsSupport(t *testing.T) { + t.Run("generic func", func(t *testing.T) { + f := srctesting.New(t) + src := `package foo + func foo[T any](t T) {}` + info, _ := f.Check("pkg/foo", f.Parse("foo.go", src)) + + err := RequiresGenericsSupport(info) + if !errors.Is(err, errDefinesGenerics) { + t.Errorf("Got: RequiresGenericsSupport() = %v. Want: %v", err, errDefinesGenerics) + } + }) + + t.Run("generic type", func(t *testing.T) { + f := srctesting.New(t) + src := `package foo + type Foo[T any] struct{t T}` + info, _ := f.Check("pkg/foo", f.Parse("foo.go", src)) + + err := RequiresGenericsSupport(info) + if !errors.Is(err, errDefinesGenerics) { + t.Errorf("Got: RequiresGenericsSupport() = %v. Want: %v", err, errDefinesGenerics) + } + }) + + t.Run("imported generic instance", func(t *testing.T) { + f := srctesting.New(t) + f.Info = nil // Do not combine type checking info from different packages. + src1 := `package foo + type Foo[T any] struct{t T}` + f.Check("pkg/foo", f.Parse("foo.go", src1)) + + src2 := `package bar + import "pkg/foo" + func bar() { _ = foo.Foo[int]{} }` + info, _ := f.Check("pkg/bar", f.Parse("bar.go", src2)) + + err := RequiresGenericsSupport(info) + if !errors.Is(err, errInstantiatesGenerics) { + t.Errorf("Got: RequiresGenericsSupport() = %v. 
Want: %v", err, errInstantiatesGenerics) + } + }) + + t.Run("no generic usage", func(t *testing.T) { + f := srctesting.New(t) + src := `package foo + type Foo struct{} + func foo() { _ = Foo{} }` + info, _ := f.Check("pkg/foo", f.Parse("foo.go", src)) + + err := RequiresGenericsSupport(info) + if err != nil { + t.Errorf("Got: RequiresGenericsSupport() = %v. Want: nil", err) + } + }) +} diff --git a/compiler/jsFile/jsFile.go b/compiler/jsFile/jsFile.go new file mode 100644 index 000000000..b8ae9421f --- /dev/null +++ b/compiler/jsFile/jsFile.go @@ -0,0 +1,55 @@ +package jsFile + +import ( + "fmt" + "go/build" + "io" + "strings" + "time" + + "golang.org/x/tools/go/buildutil" +) + +// JSFile represents a *.inc.js file metadata and content. +type JSFile struct { + Path string // Full file path for the build context the file came from. + ModTime time.Time + Content []byte +} + +// JSFilesFromDir finds and loads any *.inc.js packages in the build context +// directory. +func JSFilesFromDir(bctx *build.Context, dir string) ([]JSFile, error) { + files, err := buildutil.ReadDir(bctx, dir) + if err != nil { + return nil, err + } + var jsFiles []JSFile + for _, file := range files { + if !strings.HasSuffix(file.Name(), ".inc.js") || file.IsDir() { + continue + } + if file.Name()[0] == '_' || file.Name()[0] == '.' { + continue // Skip "hidden" files that are typically ignored by the Go build system. + } + + path := buildutil.JoinPath(bctx, dir, file.Name()) + f, err := buildutil.OpenFile(bctx, path) + if err != nil { + return nil, fmt.Errorf("failed to open %s from %v: %w", path, bctx, err) + } + defer f.Close() + + content, err := io.ReadAll(f) + if err != nil { + return nil, fmt.Errorf("failed to read %s from %v: %w", path, bctx, err) + } + + jsFiles = append(jsFiles, JSFile{ + Path: path, + ModTime: file.ModTime(), + Content: content, + }) + } + return jsFiles, nil +} diff --git a/compiler/linkname.go b/compiler/linkname/linkname.go similarity index 65% rename from compiler/linkname.go rename to compiler/linkname/linkname.go index ae1e3ea2b..6c3a9623c 100644 --- a/compiler/linkname.go +++ b/compiler/linkname/linkname.go @@ -1,13 +1,14 @@ -package compiler +package linkname import ( "fmt" "go/ast" "go/token" - "go/types" "strings" "github.com/gopherjs/gopherjs/compiler/astutil" + "github.com/gopherjs/gopherjs/compiler/internal/symbol" + "github.com/gopherjs/gopherjs/internal/errorList" ) // GoLinkname describes a go:linkname compiler directive found in the source code. @@ -17,65 +18,11 @@ import ( // symbols referencing it. This is subtly different from the upstream Go // implementation, which simply overrides symbol name the linker will use. type GoLinkname struct { - Implementation SymName - Reference SymName + Implementation symbol.Name + Reference symbol.Name } -// SymName uniquely identifies a named submol within a program. -// -// This is a logical equivalent of a symbol name used by traditional linkers. -// The following properties should hold true: -// -// - Each named symbol within a program has a unique SymName. -// - Similarly named methods of different types will have different symbol names. -// - The string representation is opaque and should not be attempted to reversed -// to a struct form. -type SymName struct { - PkgPath string // Full package import path. - Name string // Symbol name. -} - -// newSymName constructs SymName for a given named symbol. 
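
Illustrative sketch, not part of the patch: loading the *.inc.js files that sit next to a package's Go sources with the new jsFile package. It assumes the default build context, a hypothetical ./mypkg directory, and the import path github.com/gopherjs/gopherjs/compiler/jsFile.

    package main

    import (
    	"fmt"
    	"go/build"

    	"github.com/gopherjs/gopherjs/compiler/jsFile"
    )

    func main() {
    	bctx := build.Default
    	// Finds every non-hidden *.inc.js file in the directory and loads its content.
    	files, err := jsFile.JSFilesFromDir(&bctx, "./mypkg") // hypothetical directory
    	if err != nil {
    		panic(err)
    	}
    	for _, f := range files {
    		fmt.Printf("%s (%d bytes, modified %s)\n", f.Path, len(f.Content), f.ModTime)
    	}
    }
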
-func newSymName(o types.Object) SymName { - if fun, ok := o.(*types.Func); ok { - sig := fun.Type().(*types.Signature) - if recv := sig.Recv(); recv != nil { - // Special case: disambiguate names for different types' methods. - typ := recv.Type() - if ptr, ok := typ.(*types.Pointer); ok { - return SymName{ - PkgPath: o.Pkg().Path(), - Name: "(*" + ptr.Elem().(*types.Named).Obj().Name() + ")." + o.Name(), - } - } - return SymName{ - PkgPath: o.Pkg().Path(), - Name: typ.(*types.Named).Obj().Name() + "." + o.Name(), - } - } - } - return SymName{ - PkgPath: o.Pkg().Path(), - Name: o.Name(), - } -} - -func (n SymName) String() string { return n.PkgPath + "." + n.Name } - -func (n SymName) IsMethod() (recv string, method string, ok bool) { - pos := strings.IndexByte(n.Name, '.') - if pos == -1 { - return - } - recv, method, ok = n.Name[:pos], n.Name[pos+1:], true - size := len(recv) - if size > 2 && recv[0] == '(' && recv[size-1] == ')' { - recv = recv[1 : size-1] - } - return -} - -// parseGoLinknames processed comments in a source file and extracts //go:linkname +// ParseGoLinknames processed comments in a source file and extracts //go:linkname // compiler directive from the comments. // // The following directive format is supported: @@ -91,8 +38,8 @@ func (n SymName) IsMethod() (recv string, method string, ok bool) { // - The local function referenced by the directive must have no body (in other // words, it can only "import" an external function implementation into the // local scope). -func parseGoLinknames(fset *token.FileSet, pkgPath string, file *ast.File) ([]GoLinkname, error) { - var errs ErrorList = nil +func ParseGoLinknames(fset *token.FileSet, pkgPath string, file *ast.File) ([]GoLinkname, error) { + var errs errorList.ErrorList = nil var directives []GoLinkname isUnsafe := astutil.ImportsUnsafe(file) @@ -152,8 +99,8 @@ func parseGoLinknames(fset *token.FileSet, pkgPath string, file *ast.File) ([]Go } // Local function has no body, treat it as a reference to an external implementation. directives = append(directives, GoLinkname{ - Reference: SymName{PkgPath: localPkg, Name: localName}, - Implementation: SymName{PkgPath: extPkg, Name: extName}, + Reference: symbol.Name{PkgPath: localPkg, Name: localName}, + Implementation: symbol.Name{PkgPath: extPkg, Name: extName}, }) return nil } @@ -161,28 +108,33 @@ func parseGoLinknames(fset *token.FileSet, pkgPath string, file *ast.File) ([]Go for _, cg := range file.Comments { for _, c := range cg.List { if err := processComment(c); err != nil { - errs = append(errs, ErrorAt(err, fset, c.Pos())) + errs = append(errs, errorAt(err, fset, c.Pos())) } } } - return directives, errs.Normalize() + return directives, errs.ErrOrNil() +} + +// errorAt annotates an error with a position in the source code. +func errorAt(err error, fset *token.FileSet, pos token.Pos) error { + return fmt.Errorf("%s: %w", fset.Position(pos), err) } -// goLinknameSet is a utility that enables quick lookup of whether a decl is +// GoLinknameSet is a utility that enables quick lookup of whether a decl is // affected by any go:linkname directive in the program. -type goLinknameSet struct { - byImplementation map[SymName][]GoLinkname - byReference map[SymName]GoLinkname +type GoLinknameSet struct { + byImplementation map[symbol.Name][]GoLinkname + byReference map[symbol.Name]GoLinkname } // Add more GoLinkname directives into the set. 
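
Illustrative example, not part of the patch: a //go:linkname directive in the form ParseGoLinknames accepts, and the GoLinkname entry it is expected to produce. The package paths and names below are invented.

    package fastmath

    import (
    	_ "unsafe" // linkname directives are only honored in files that import "unsafe".
    )

    // Sqrt has no body: the directive "imports" the implementation of
    // example.com/impl.sqrtJS into the local scope.
    //
    //go:linkname Sqrt example.com/impl.sqrtJS
    func Sqrt(x float64) float64

    // For a package with import path "example.com/fastmath", ParseGoLinknames is
    // expected to yield:
    //
    //	GoLinkname{
    //		Reference:      symbol.Name{PkgPath: "example.com/fastmath", Name: "Sqrt"},
    //		Implementation: symbol.Name{PkgPath: "example.com/impl", Name: "sqrtJS"},
    //	}
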
-func (gls *goLinknameSet) Add(entries []GoLinkname) error { +func (gls *GoLinknameSet) Add(entries []GoLinkname) error { if gls.byImplementation == nil { - gls.byImplementation = map[SymName][]GoLinkname{} + gls.byImplementation = map[symbol.Name][]GoLinkname{} } if gls.byReference == nil { - gls.byReference = map[SymName]GoLinkname{} + gls.byReference = map[symbol.Name]GoLinkname{} } for _, e := range entries { gls.byImplementation[e.Implementation] = append(gls.byImplementation[e.Implementation], e) @@ -197,7 +149,7 @@ func (gls *goLinknameSet) Add(entries []GoLinkname) error { // IsImplementation returns true if there is a directive referencing this symbol // as an implementation. -func (gls *goLinknameSet) IsImplementation(sym SymName) bool { +func (gls *GoLinknameSet) IsImplementation(sym symbol.Name) bool { _, found := gls.byImplementation[sym] return found } @@ -205,7 +157,7 @@ func (gls *goLinknameSet) IsImplementation(sym SymName) bool { // FindImplementation returns a symbol name, which provides the implementation // for the given symbol. The second value indicates whether the implementation // was found. -func (gls *goLinknameSet) FindImplementation(sym SymName) (SymName, bool) { +func (gls *GoLinknameSet) FindImplementation(sym symbol.Name) (symbol.Name, bool) { directive, found := gls.byReference[sym] return directive.Implementation, found } diff --git a/compiler/linkname_test.go b/compiler/linkname/linkname_test.go similarity index 68% rename from compiler/linkname_test.go rename to compiler/linkname/linkname_test.go index d0ce9c542..e2abc2825 100644 --- a/compiler/linkname_test.go +++ b/compiler/linkname/linkname_test.go @@ -1,4 +1,4 @@ -package compiler +package linkname import ( "go/ast" @@ -11,6 +11,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" + "github.com/gopherjs/gopherjs/compiler/internal/symbol" ) func parseSource(t *testing.T, src string) (*ast.File, *token.FileSet) { @@ -41,49 +42,6 @@ func makePackage(t *testing.T, src string) *types.Package { return pkg } -func TestSymName(t *testing.T) { - pkg := makePackage(t, - `package testcase - - func AFunction() {} - type AType struct {} - func (AType) AMethod() {} - func (*AType) APointerMethod() {} - var AVariable int32 - `) - - tests := []struct { - obj types.Object - want SymName - }{ - { - obj: pkg.Scope().Lookup("AFunction"), - want: SymName{PkgPath: "testcase", Name: "AFunction"}, - }, { - obj: pkg.Scope().Lookup("AType"), - want: SymName{PkgPath: "testcase", Name: "AType"}, - }, { - obj: types.NewMethodSet(pkg.Scope().Lookup("AType").Type()).Lookup(pkg, "AMethod").Obj(), - want: SymName{PkgPath: "testcase", Name: "AType.AMethod"}, - }, { - obj: types.NewMethodSet(types.NewPointer(pkg.Scope().Lookup("AType").Type())).Lookup(pkg, "APointerMethod").Obj(), - want: SymName{PkgPath: "testcase", Name: "(*AType).APointerMethod"}, - }, { - obj: pkg.Scope().Lookup("AVariable"), - want: SymName{PkgPath: "testcase", Name: "AVariable"}, - }, - } - - for _, test := range tests { - t.Run(test.obj.Name(), func(t *testing.T) { - got := newSymName(test.obj) - if got != test.want { - t.Errorf("NewSymName(%q) returned %#v, want: %#v", test.obj.Name(), got, test.want) - } - }) - } -} - func TestParseGoLinknames(t *testing.T) { tests := []struct { desc string @@ -114,8 +72,8 @@ func TestParseGoLinknames(t *testing.T) { `, wantDirectives: []GoLinkname{ { - Reference: SymName{PkgPath: "testcase", Name: "a"}, - Implementation: SymName{PkgPath: "other/package", Name: "testcase_a"}, + Reference: 
symbol.Name{PkgPath: "testcase", Name: "a"}, + Implementation: symbol.Name{PkgPath: "other/package", Name: "testcase_a"}, }, }, }, { @@ -132,11 +90,11 @@ func TestParseGoLinknames(t *testing.T) { `, wantDirectives: []GoLinkname{ { - Reference: SymName{PkgPath: "testcase", Name: "a"}, - Implementation: SymName{PkgPath: "other/package", Name: "a"}, + Reference: symbol.Name{PkgPath: "testcase", Name: "a"}, + Implementation: symbol.Name{PkgPath: "other/package", Name: "a"}, }, { - Reference: SymName{PkgPath: "testcase", Name: "b"}, - Implementation: SymName{PkgPath: "other/package", Name: "b"}, + Reference: symbol.Name{PkgPath: "testcase", Name: "b"}, + Implementation: symbol.Name{PkgPath: "other/package", Name: "b"}, }, }, }, { @@ -193,7 +151,7 @@ func TestParseGoLinknames(t *testing.T) { for _, test := range tests { t.Run(test.desc, func(t *testing.T) { file, fset := parseSource(t, test.src) - directives, err := parseGoLinknames(fset, "testcase", file) + directives, err := ParseGoLinknames(fset, "testcase", file) if test.wantError != "" { if err == nil { diff --git a/compiler/natives/src/compress/gzip/example_test.go b/compiler/natives/src/compress/gzip/example_test.go new file mode 100644 index 000000000..b3f6fbe43 --- /dev/null +++ b/compiler/natives/src/compress/gzip/example_test.go @@ -0,0 +1,13 @@ +//go:build js && wasm +// +build js,wasm + +package gzip_test + +import ( + "fmt" +) + +// The test relies on a local HTTP server, which is not supported under NodeJS. +func Example_compressingReader() { + fmt.Println("the data to be compressed") +} diff --git a/compiler/natives/src/crypto/ed25519/ed25519vectors_test.go b/compiler/natives/src/crypto/ed25519/ed25519vectors_test.go deleted file mode 100644 index 90f455b9d..000000000 --- a/compiler/natives/src/crypto/ed25519/ed25519vectors_test.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build js -// +build js - -package ed25519_test - -import "testing" - -func TestEd25519Vectors(t *testing.T) { - t.Skip("exec.Command() is not supported by GopherJS") -} diff --git a/compiler/natives/src/crypto/ed25519/internal/edwards25519/field/fe_test.go b/compiler/natives/src/crypto/ed25519/internal/edwards25519/field/fe_test.go deleted file mode 100644 index c448d519c..000000000 --- a/compiler/natives/src/crypto/ed25519/internal/edwards25519/field/fe_test.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build js -// +build js - -package field - -import ( - "testing/quick" -) - -var quickCheckConfig1024 = &quick.Config{MaxCount: 100} diff --git a/compiler/natives/src/crypto/ed25519/internal/edwards25519/scalar_test.go b/compiler/natives/src/crypto/ed25519/internal/edwards25519/scalar_test.go deleted file mode 100644 index 612663d1c..000000000 --- a/compiler/natives/src/crypto/ed25519/internal/edwards25519/scalar_test.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build js -// +build js - -package edwards25519 - -import ( - "testing/quick" -) - -var quickCheckConfig32 = &quick.Config{MaxCount: 100} diff --git a/compiler/natives/src/crypto/internal/edwards25519/field/fe_alias_test.go b/compiler/natives/src/crypto/internal/edwards25519/field/fe_alias_test.go new file mode 100644 index 000000000..db4af600d --- /dev/null +++ b/compiler/natives/src/crypto/internal/edwards25519/field/fe_alias_test.go @@ -0,0 +1,16 @@ +//go:build js + +package field + +import ( + "testing" + "testing/quick" +) + +//gopherjs:keep-original +func TestAliasing(t *testing.T) { + // The test heavily uses 64-bit math, which is slow under GopherJS. 
Reducing + // the number of iterations makes run time more manageable. + t.Cleanup(quick.GopherJSInternalMaxCountCap(100)) + _gopherjs_original_TestAliasing(t) +} diff --git a/compiler/natives/src/crypto/internal/edwards25519/field/fe_test.go b/compiler/natives/src/crypto/internal/edwards25519/field/fe_test.go new file mode 100644 index 000000000..9f8c898d5 --- /dev/null +++ b/compiler/natives/src/crypto/internal/edwards25519/field/fe_test.go @@ -0,0 +1,9 @@ +//go:build js + +package field + +import "testing/quick" + +// Tests in this package use 64-bit math, which is slow under GopherJS. To keep +// test run time reasonable, we reduce the number of test iterations. +var quickCheckConfig1024 = &quick.Config{MaxCountScale: 10} diff --git a/compiler/natives/src/crypto/internal/edwards25519/scalar_test.go b/compiler/natives/src/crypto/internal/edwards25519/scalar_test.go new file mode 100644 index 000000000..ec862a349 --- /dev/null +++ b/compiler/natives/src/crypto/internal/edwards25519/scalar_test.go @@ -0,0 +1,9 @@ +//go:build js + +package edwards25519 + +import "testing/quick" + +// Tests in this package use 64-bit math, which is slow under GopherJS. To keep +// test run time reasonable, we reduce the number of test iterations. +var quickCheckConfig1024 = &quick.Config{MaxCountScale: 1} diff --git a/compiler/natives/src/crypto/internal/edwards25519/scalarmult_test.go b/compiler/natives/src/crypto/internal/edwards25519/scalarmult_test.go new file mode 100644 index 000000000..9cacfb24c --- /dev/null +++ b/compiler/natives/src/crypto/internal/edwards25519/scalarmult_test.go @@ -0,0 +1,9 @@ +//go:build js + +package edwards25519 + +import "testing/quick" + +// Tests in this package use 64-bit math, which is slow under GopherJS. To keep +// test run time reasonable, we reduce the number of test iterations. +var quickCheckConfig32 = &quick.Config{MaxCountScale: 0.5} diff --git a/compiler/natives/src/crypto/tls/handshake_test.go b/compiler/natives/src/crypto/tls/handshake_test.go index c9e19d5ed..b5fe59a57 100644 --- a/compiler/natives/src/crypto/tls/handshake_test.go +++ b/compiler/natives/src/crypto/tls/handshake_test.go @@ -10,7 +10,7 @@ import ( // Same as upstream, except we check for GOARCH=ecmascript instead of wasm. // This override can be removed after https://github.com/golang/go/pull/51827 -// is available in the upstream (likely in Go 1.19). +// is available in the upstream (likely after Go 1.19). func TestServerHandshakeContextCancellation(t *testing.T) { c, s := localPipe(t) ctx, cancel := context.WithCancel(context.Background()) @@ -42,7 +42,7 @@ func TestServerHandshakeContextCancellation(t *testing.T) { // Same as upstream, except we check for GOARCH=ecmascript instead of wasm. // This override can be removed after https://github.com/golang/go/pull/51827 -// is available in the upstream (likely in Go 1.19). +// is available in the upstream (likely after Go 1.19). func TestClientHandshakeContextCancellation(t *testing.T) { c, s := localPipe(t) ctx, cancel := context.WithCancel(context.Background()) @@ -71,3 +71,15 @@ func TestClientHandshakeContextCancellation(t *testing.T) { t.Error("Client connection was not closed when the context was canceled") } } + +func TestVerifyConnection(t *testing.T) { + // This should be rechecked after upgrading to Go 1.20 or later. + // go1.19.13/src/crypto/tls/handshake_test.go:testRSACertificateIssuer has expired. 
+ t.Skip("Skipping test that uses predefined certificate that expired in Jan 1st 2025") +} + +func TestResumptionKeepsOCSPAndSCT(t *testing.T) { + // This should be rechecked after upgrading to Go 1.20 or later. + // go1.19.13/src/crypto/tls/handshake_test.go:testRSACertificateIssuer has expired. + t.Skip("Skipping test that uses predefined certificate that expired in Jan 1st 2025") +} diff --git a/compiler/natives/src/crypto/x509/name_constraints_test.go b/compiler/natives/src/crypto/x509/name_constraints_test.go new file mode 100644 index 000000000..9b1190a6d --- /dev/null +++ b/compiler/natives/src/crypto/x509/name_constraints_test.go @@ -0,0 +1,16 @@ +//go:build js + +package x509 + +import "testing" + +//gopherjs:keep-original +func TestConstraintCases(t *testing.T) { + if testing.Short() { + // These tests are slow under GopherJS. Since GopherJS doesn't touch + // business logic behind them, there's little value in running them all. + // Instead, in the short mode we just just the first few as a smoke test. + nameConstraintsTests = nameConstraintsTests[0:5] + } + _gopherjs_original_TestConstraintCases(t) +} diff --git a/compiler/natives/src/image/gif/fuzz_test.go b/compiler/natives/src/image/gif/fuzz_test.go new file mode 100644 index 000000000..b79977bfc --- /dev/null +++ b/compiler/natives/src/image/gif/fuzz_test.go @@ -0,0 +1,14 @@ +//go:build js + +package gif + +import "testing" + +//gopherjs:keep-original +func FuzzDecode(t *testing.F) { + if testing.Short() { + t.Skip("FuzzDecode is slow, skipping in the short mode.") + } + + _gopherjs_original_FuzzDecode(t) +} diff --git a/compiler/natives/src/os/file.go b/compiler/natives/src/os/file.go index 37d4275f3..a3683b8b0 100644 --- a/compiler/natives/src/os/file.go +++ b/compiler/natives/src/os/file.go @@ -3,7 +3,7 @@ package os -// WriteString copied from Go 1.16, before it was made more peformant, and unsafe. +// WriteString copied from Go 1.16, before it was made more performant, and unsafe. func (f *File) WriteString(s string) (n int, err error) { return f.Write([]byte(s)) } diff --git a/compiler/natives/src/reflect/reflect.go b/compiler/natives/src/reflect/reflect.go index 47b93662e..81f4c7b08 100644 --- a/compiler/natives/src/reflect/reflect.go +++ b/compiler/natives/src/reflect/reflect.go @@ -1778,26 +1778,28 @@ func valueMethodName() string { var pc [5]uintptr n := runtime.Callers(1, pc[:]) frames := runtime.CallersFrames(pc[:n]) + valueTyp := TypeOf(Value{}) var frame runtime.Frame for more := true; more; { frame, more = frames.Next() name := frame.Function - // Function name extracted from the call stack can be different from // vanilla Go, so is not prefixed by "reflect.Value." as needed by the original. // See https://cs.opensource.google/go/go/+/refs/tags/go1.19.13:src/reflect/value.go;l=173-191 - // Here we try to fix stuff like "Object.$packages.reflect.Q.ptr.SetIterKey" - // into "reflect.Value.SetIterKey". // This workaround may become obsolete after // https://github.com/gopherjs/gopherjs/issues/1085 is resolved. 
- const prefix = `Object.$packages.reflect.` - if stringsHasPrefix(name, prefix) { - if idx := stringsLastIndex(name, '.'); idx >= 0 { - methodName := name[idx+1:] - if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' { - return `reflect.Value.` + methodName - } + methodName := name + if idx := stringsLastIndex(name, '.'); idx >= 0 { + methodName = name[idx+1:] + } + + // Since function name in the call stack doesn't contain receiver name, + // we are looking for the first exported function name that matches a + // known Value method. + if _, ok := valueTyp.MethodByName(methodName); ok { + if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' { + return `reflect.Value.` + methodName } } } diff --git a/compiler/natives/src/reflect/reflect_test.go b/compiler/natives/src/reflect/reflect_test.go index 79bbe5385..4c0bcd0be 100644 --- a/compiler/natives/src/reflect/reflect_test.go +++ b/compiler/natives/src/reflect/reflect_test.go @@ -298,3 +298,23 @@ func TestIssue50208(t *testing.T) { func TestStructOfTooLarge(t *testing.T) { t.Skip("This test is dependent on field alignment to determine if a struct size would exceed virtual address space.") } + +func TestSetLenCap(t *testing.T) { + t.Skip("Test depends on call stack function names: https://github.com/gopherjs/gopherjs/issues/1085") +} + +func TestSetPanic(t *testing.T) { + t.Skip("Test depends on call stack function names: https://github.com/gopherjs/gopherjs/issues/1085") +} + +func TestCallPanic(t *testing.T) { + t.Skip("Test depends on call stack function names: https://github.com/gopherjs/gopherjs/issues/1085") +} + +func TestValuePanic(t *testing.T) { + t.Skip("Test depends on call stack function names: https://github.com/gopherjs/gopherjs/issues/1085") +} + +func TestSetIter(t *testing.T) { + t.Skip("Test depends on call stack function names: https://github.com/gopherjs/gopherjs/issues/1085") +} diff --git a/compiler/natives/src/runtime/runtime.go b/compiler/natives/src/runtime/runtime.go index 41c60876c..9f8425af8 100644 --- a/compiler/natives/src/runtime/runtime.go +++ b/compiler/natives/src/runtime/runtime.go @@ -468,7 +468,7 @@ func StartTrace() error { return nil } func StopTrace() {} func ReadTrace() []byte -// We fake a cgo environment to catch errors. Therefor we have to implement this and always return 0 +// We fake a cgo environment to catch errors. Therefore we have to implement this and always return 0 func NumCgoCall() int64 { return 0 } diff --git a/compiler/natives/src/strings/strings.go b/compiler/natives/src/strings/strings.go index ebb1db1ef..2867872f6 100644 --- a/compiler/natives/src/strings/strings.go +++ b/compiler/natives/src/strings/strings.go @@ -68,7 +68,7 @@ func (b *Builder) copyCheck() { } func Clone(s string) string { - // Since in the JavaScript runtime we don't have access the the string's + // Since in the JavaScript runtime we don't have access the string's // baking memory, we let the engine's garbage collector deal with substring // memory overheads and simply return the string as-is. return s diff --git a/compiler/natives/src/testing/quick/quick.go b/compiler/natives/src/testing/quick/quick.go new file mode 100644 index 000000000..51fa843aa --- /dev/null +++ b/compiler/natives/src/testing/quick/quick.go @@ -0,0 +1,37 @@ +//go:build js + +package quick + +var maxCountCap int = 0 + +// GopherJSInternalMaxCountCap sets an upper bound of iterations quick test may +// perform. 
THIS IS GOPHERJS-INTERNAL API, DO NOT USE IT OUTSIDE OF THE GOPHERJS +// CODEBASE, IT MAY CHANGE OR DISAPPEAR WITHOUT NOTICE. +// +// This function can be used to limit run time of standard library tests which +// use testing/quick with too many iterations for GopherJS to complete in a +// reasonable amount of time. This is a better compromise than disabling a slow +// test entirely. +// +// //gopherjs:keep-original +// func TestFoo(t *testing.T) { +// t.Cleanup(quick.GopherJSInternalMaxCountCap(100)) +// _gopherjs_original_TestFoo(t) +// } + +func GopherJSInternalMaxCountCap(newCap int) (restore func()) { + previousCap := maxCountCap + maxCountCap = newCap + return func() { + maxCountCap = previousCap + } +} + +//gopherjs:keep-original +func (c *Config) getMaxCount() (maxCount int) { + maxCount = c._gopherjs_original_getMaxCount() + if maxCountCap > 0 && maxCount > maxCountCap { + maxCount = maxCountCap + } + return maxCount +} diff --git a/compiler/package.go b/compiler/package.go index ad918ba3e..bb94962da 100644 --- a/compiler/package.go +++ b/compiler/package.go @@ -1,129 +1,160 @@ package compiler import ( - "bytes" - "encoding/json" "fmt" "go/ast" - "go/constant" - "go/scanner" "go/token" "go/types" - "sort" "strings" - "time" - "github.com/gopherjs/gopherjs/compiler/analysis" - "github.com/gopherjs/gopherjs/compiler/astutil" - "github.com/neelance/astrewrite" - "golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/go/types/typeutil" + + "github.com/gopherjs/gopherjs/compiler/internal/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/dce" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" + "github.com/gopherjs/gopherjs/compiler/sources" + "github.com/gopherjs/gopherjs/compiler/typesutil" + "github.com/gopherjs/gopherjs/internal/errorList" ) // pkgContext maintains compiler context for a specific package. type pkgContext struct { *analysis.Info - additionalSelections map[*ast.SelectorExpr]selection - - typeNames []*types.TypeName + dce.Collector + additionalSelections map[*ast.SelectorExpr]typesutil.Selection + + typesCtx *types.Context + // List of type names declared in the package, including those defined inside + // functions. + typeNames typesutil.TypeNames + // Mapping from package import paths to JS variables that were assigned to an + // imported package and can be used to access it. pkgVars map[string]string - objectNames map[types.Object]string varPtrNames map[*types.Var]string anonTypes []*types.TypeName anonTypeMap typeutil.Map escapingVars map[*types.Var]bool indentation int - dependencies map[types.Object]bool minify bool fileSet *token.FileSet - errList ErrorList + errList errorList.ErrorList + instanceSet *typeparams.PackageInstanceSets } -func (p *pkgContext) SelectionOf(e *ast.SelectorExpr) (selection, bool) { - if sel, ok := p.Selections[e]; ok { - return sel, true - } - if sel, ok := p.additionalSelections[e]; ok { - return sel, true - } - return nil, false +// isMain returns true if this is the main package of the program. 
+func (pc *pkgContext) isMain() bool { + return pc.Pkg.Name() == "main" } -type selection interface { - Kind() types.SelectionKind - Recv() types.Type - Index() []int - Obj() types.Object - Type() types.Type -} - -type fakeSelection struct { - kind types.SelectionKind - recv types.Type - index []int - obj types.Object - typ types.Type -} - -func (sel *fakeSelection) Kind() types.SelectionKind { return sel.kind } -func (sel *fakeSelection) Recv() types.Type { return sel.recv } -func (sel *fakeSelection) Index() []int { return sel.index } -func (sel *fakeSelection) Obj() types.Object { return sel.obj } -func (sel *fakeSelection) Type() types.Type { return sel.typ } - -// funcContext maintains compiler context for a specific function (lexical scope?). +// funcContext maintains compiler context for a specific function. +// +// An instance of this type roughly corresponds to a lexical scope for generated +// JavaScript code (as defined for `var` declarations). type funcContext struct { *analysis.FuncInfo - pkgCtx *pkgContext - parent *funcContext - sig *types.Signature - allVars map[string]int - localVars []string - resultNames []ast.Expr - flowDatas map[*types.Label]*flowData - caseCounter int - labelCases map[*types.Label]int - output []byte + // Function instance this context corresponds to, or zero if the context is + // top-level or doesn't correspond to a function. For function literals, this + // is a synthetic object that assigns a unique identity to the function. + instance typeparams.Instance + // JavaScript identifier assigned to the function object (the word after the + // "function" keyword in the generated code). This identifier can be used + // within the function scope to reference the function object. It will also + // appear in the stack trace. + funcRef string + // Surrounding package context. + pkgCtx *pkgContext + // Function context, surrounding this function definition. For package-level + // functions or methods it is the package-level function context (even though + // it technically doesn't correspond to a function). nil for the package-level + // function context. + parent *funcContext + // Signature of the function this context corresponds to or nil for the + // package-level function context. For generic functions it is the original + // generic signature to make sure result variable identity in the signature + // matches the variable objects referenced in the function body. + sig *typesutil.Signature + // All variable names available in the current function scope. The key is a Go + // variable name and the value is the number of synonymous variable names + // visible from this scope (e.g. due to shadowing). This number is used to + // avoid conflicts when assigning JS variable names for Go variables. + allVars map[string]int + // Local JS variable names defined within this function context. This list + // contains JS variable names assigned to Go variables, as well as other + // auxiliary variables the compiler needs. It is used to generate `var` + // declaration at the top of the function, as well as context save/restore. + localVars []string + // AST expressions representing function's named return values. nil if the + // function has no return values or they are not named. + resultNames []ast.Expr + // Function's internal control flow graph used for generation of a "flattened" + // version of the function when the function is blocking or uses goto. + // TODO(nevkontakte): Describe the exact semantics of this map. 
+ flowDatas map[*types.Label]*flowData + // Number of control flow blocks in a "flattened" function. + caseCounter int + // A mapping from Go labels statements (e.g. labelled loop) to the flow block + // id corresponding to it. + labelCases map[*types.Label]int + // Generated code buffer for the current function. + output []byte + // Generated code that should be emitted at the end of the JS statement. delayedOutput []byte - posAvailable bool - pos token.Pos -} - -type flowData struct { - postStmt func() - beginCase int - endCase int -} - -type ImportContext struct { - Packages map[string]*types.Package - Import func(string) (*Archive, error) + // Set to true if source position is available and should be emitted for the + // source map. + posAvailable bool + // Current position in the Go source code. + pos token.Pos + // For each instantiation of a generic function or method, contains the + // current mapping between type parameters and corresponding type arguments. + // The mapping is used to determine concrete types for expressions within the + // instance's context. Can be nil outside of the generic context, in which + // case calling its methods is safe and simply does no substitution. + typeResolver *typeparams.Resolver + // Mapping from function-level objects to JS variable names they have been assigned. + objectNames map[types.Object]string + // Number of function literals encountered within the current function context. + funcLitCounter int } -// packageImporter implements go/types.Importer interface. -type packageImporter struct { - importContext *ImportContext - importError *error // A pointer to importError in Compile. -} +func newRootCtx(tContext *types.Context, srcs *sources.Sources, minify bool) *funcContext { + funcCtx := &funcContext{ + FuncInfo: srcs.TypeInfo.InitFuncInfo, + pkgCtx: &pkgContext{ + Info: srcs.TypeInfo, + additionalSelections: make(map[*ast.SelectorExpr]typesutil.Selection), -func (pi packageImporter) Import(path string) (*types.Package, error) { - if path == "unsafe" { - return types.Unsafe, nil + typesCtx: tContext, + pkgVars: make(map[string]string), + varPtrNames: make(map[*types.Var]string), + escapingVars: make(map[*types.Var]bool), + indentation: 1, + minify: minify, + fileSet: srcs.FileSet, + instanceSet: srcs.TypeInfo.InstanceSets, + }, + allVars: make(map[string]int), + flowDatas: map[*types.Label]*flowData{nil: {}}, + caseCounter: 1, + labelCases: make(map[*types.Label]int), + objectNames: map[types.Object]string{}, } - - a, err := pi.importContext.Import(path) - if err != nil { - if *pi.importError == nil { - // If import failed, show first error of import only (https://github.com/gopherjs/gopherjs/issues/119). - *pi.importError = err - } - return nil, err + for name := range reservedKeywords { + funcCtx.allVars[name] = 1 } + return funcCtx +} - return pi.importContext.Packages[a.ImportPath], nil +type flowData struct { + postStmt func() + beginCase int + endCase int } -func Compile(importPath string, files []*ast.File, fileSet *token.FileSet, importContext *ImportContext, minify bool) (_ *Archive, err error) { +// Compile the provided Go sources as a single package. +// +// Provided sources must be prepared so that the type information has been determined, +// and the source files have been sorted by name to ensure reproducible JavaScript output. 
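
Illustrative sketch, not part of the patch: how a build driver might tie the new PrepareAllSources entry point and the per-package Compile signature together. It assumes the caller has already built a *sources.Sources per package, has a sources.Importer, and that the code lives in the compiler package with "go/types" imported.

    func buildProgram(all []*sources.Sources, imp sources.Importer, minify bool) ([]*Archive, error) {
    	tContext := types.NewContext()

    	// Cross-package preparation (sorting, type checking, linkname parsing,
    	// simplification, instance collection, analysis) happens once for the
    	// whole program.
    	if err := PrepareAllSources(all, imp, tContext); err != nil {
    		return nil, err
    	}

    	// Each package is then compiled individually against the shared
    	// type-checker context.
    	archives := make([]*Archive, 0, len(all))
    	for _, srcs := range all {
    		a, err := Compile(srcs, tContext, minify)
    		if err != nil {
    			return nil, err
    		}
    		archives = append(archives, a)
    	}
    	return archives, nil
    }
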
+func Compile(srcs *sources.Sources, tContext *types.Context, minify bool) (_ *Archive, err error) { defer func() { e := recover() if e == nil { @@ -131,490 +162,124 @@ func Compile(importPath string, files []*ast.File, fileSet *token.FileSet, impor } if fe, ok := bailingOut(e); ok { // Orderly bailout, return whatever clues we already have. - fmt.Fprintf(fe, `building package %q`, importPath) + fmt.Fprintf(fe, `building package %q`, srcs.ImportPath) err = fe return } // Some other unexpected panic, catch the stack trace and return as an error. - err = bailout(fmt.Errorf("unexpected compiler panic while building package %q: %v", importPath, e)) + err = bailout(fmt.Errorf("unexpected compiler panic while building package %q: %v", srcs.ImportPath, e)) }() - // Files must be in the same order to get reproducible JS - sort.Slice(files, func(i, j int) bool { - return fileSet.File(files[i].Pos()).Name() > fileSet.File(files[j].Pos()).Name() - }) - - typesInfo := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), - } + rootCtx := newRootCtx(tContext, srcs, minify) - var errList ErrorList + importedPaths, importDecls := rootCtx.importDecls() - // Extract all go:linkname compiler directives from the package source. - goLinknames := []GoLinkname{} - for _, file := range files { - found, err := parseGoLinknames(fileSet, importPath, file) - if err != nil { - if errs, ok := err.(ErrorList); ok { - errList = append(errList, errs...) - } else { - errList = append(errList, err) - } - } - goLinknames = append(goLinknames, found...) - } + vars, functions, typeNames := rootCtx.topLevelObjects(srcs) + // More named types may be added to the list when function bodies are processed. + rootCtx.pkgCtx.typeNames = typeNames - var importError error - var previousErr error - config := &types.Config{ - Importer: packageImporter{ - importContext: importContext, - importError: &importError, - }, - Sizes: sizes32, - Error: func(err error) { - if previousErr != nil && previousErr.Error() == err.Error() { - return - } - errList = append(errList, err) - previousErr = err - }, - } - typesPkg, err := config.Check(importPath, fileSet, files, typesInfo) - if importError != nil { - return nil, importError - } - if errList != nil { - if len(errList) > 10 { - pos := token.NoPos - if last, ok := errList[9].(types.Error); ok { - pos = last.Pos - } - errList = append(errList[:10], types.Error{Fset: fileSet, Pos: pos, Msg: "too many errors"}) - } - return nil, errList - } + // Translate functions and variables. + varDecls := rootCtx.varDecls(vars) + funcDecls, err := rootCtx.funcDecls(functions) if err != nil { return nil, err } - importContext.Packages[importPath] = typesPkg - exportData := new(bytes.Buffer) - if err := gcexportdata.Write(exportData, nil, typesPkg); err != nil { - return nil, fmt.Errorf("failed to write export data: %v", err) - } - encodedFileSet := new(bytes.Buffer) - if err := fileSet.Write(json.NewEncoder(encodedFileSet).Encode); err != nil { + // It is important that we translate types *after* we've processed all + // functions to make sure we've discovered all types declared inside function + // bodies. 
+ typeDecls, err := rootCtx.namedTypeDecls(rootCtx.pkgCtx.typeNames) + if err != nil { return nil, err } - simplifiedFiles := make([]*ast.File, len(files)) - for i, file := range files { - simplifiedFiles[i] = astrewrite.Simplify(file, typesInfo, false) - } + // Finally, anonymous types are translated the last, to make sure we've + // discovered all of them referenced in functions, variable and type + // declarations. + typeDecls = append(typeDecls, rootCtx.anonTypeDecls(rootCtx.pkgCtx.anonTypes)...) - isBlocking := func(f *types.Func) bool { - archive, err := importContext.Import(f.Pkg().Path()) - if err != nil { - panic(err) - } - fullName := f.FullName() - for _, d := range archive.Declarations { - if string(d.FullName) == fullName { - return d.Blocking - } - } - panic(fullName) - } - pkgInfo := analysis.AnalyzePkg(simplifiedFiles, fileSet, typesInfo, typesPkg, isBlocking) - funcCtx := &funcContext{ - FuncInfo: pkgInfo.InitFuncInfo, - pkgCtx: &pkgContext{ - Info: pkgInfo, - additionalSelections: make(map[*ast.SelectorExpr]selection), - - pkgVars: make(map[string]string), - objectNames: make(map[types.Object]string), - varPtrNames: make(map[*types.Var]string), - escapingVars: make(map[*types.Var]bool), - indentation: 1, - dependencies: make(map[types.Object]bool), - minify: minify, - fileSet: fileSet, - }, - allVars: make(map[string]int), - flowDatas: map[*types.Label]*flowData{nil: {}}, - caseCounter: 1, - labelCases: make(map[*types.Label]int), - } - for name := range reservedKeywords { - funcCtx.allVars[name] = 1 - } + // Combine all decls in a single list in the order they must appear in the + // final program. + allDecls := append(append(append(importDecls, typeDecls...), varDecls...), funcDecls...) - // imports - var importDecls []*Decl - var importedPaths []string - for _, importedPkg := range typesPkg.Imports() { - if importedPkg == types.Unsafe { - // Prior to Go 1.9, unsafe import was excluded by Imports() method, - // but now we do it here to maintain previous behavior. 
- continue + if minify { + for _, d := range allDecls { + *d = d.minify() } - funcCtx.pkgCtx.pkgVars[importedPkg.Path()] = funcCtx.newVariableWithLevel(importedPkg.Name(), true) - importedPaths = append(importedPaths, importedPkg.Path()) - } - sort.Strings(importedPaths) - for _, impPath := range importedPaths { - id := funcCtx.newIdent(fmt.Sprintf(`%s.$init`, funcCtx.pkgCtx.pkgVars[impPath]), types.NewSignatureType(nil, nil, nil, nil, nil, false)) - call := &ast.CallExpr{Fun: id} - funcCtx.Blocking[call] = true - funcCtx.Flattened[call] = true - importDecls = append(importDecls, &Decl{ - Vars: []string{funcCtx.pkgCtx.pkgVars[impPath]}, - DeclCode: []byte(fmt.Sprintf("\t%s = $packages[\"%s\"];\n", funcCtx.pkgCtx.pkgVars[impPath], impPath)), - InitCode: funcCtx.CatchOutput(1, func() { funcCtx.translateStmt(&ast.ExprStmt{X: call}, nil) }), - }) } - var functions []*ast.FuncDecl - var vars []*types.Var - for _, file := range simplifiedFiles { - for _, decl := range file.Decls { - switch d := decl.(type) { - case *ast.FuncDecl: - sig := funcCtx.pkgCtx.Defs[d.Name].(*types.Func).Type().(*types.Signature) - if sig.Recv() == nil { - funcCtx.objectName(funcCtx.pkgCtx.Defs[d.Name].(*types.Func)) // register toplevel name - } - if !isBlank(d.Name) { - functions = append(functions, d) - } - case *ast.GenDecl: - switch d.Tok { - case token.TYPE: - for _, spec := range d.Specs { - o := funcCtx.pkgCtx.Defs[spec.(*ast.TypeSpec).Name].(*types.TypeName) - funcCtx.pkgCtx.typeNames = append(funcCtx.pkgCtx.typeNames, o) - funcCtx.objectName(o) // register toplevel name - } - case token.VAR: - for _, spec := range d.Specs { - for _, name := range spec.(*ast.ValueSpec).Names { - if !isBlank(name) { - o := funcCtx.pkgCtx.Defs[name].(*types.Var) - vars = append(vars, o) - funcCtx.objectName(o) // register toplevel name - } - } - } - case token.CONST: - // skip, constants are inlined - } - } - } + if len(rootCtx.pkgCtx.errList) != 0 { + return nil, rootCtx.pkgCtx.errList } - collectDependencies := func(f func()) []string { - funcCtx.pkgCtx.dependencies = make(map[types.Object]bool) - f() - var deps []string - for o := range funcCtx.pkgCtx.dependencies { - qualifiedName := o.Pkg().Path() + "." 
+ o.Name() - if f, ok := o.(*types.Func); ok && f.Type().(*types.Signature).Recv() != nil { - deps = append(deps, qualifiedName+"~") - continue - } - deps = append(deps, qualifiedName) - } - sort.Strings(deps) - return deps - } + return &Archive{ + ImportPath: srcs.ImportPath, + Name: srcs.Package.Name(), + Imports: importedPaths, + Package: srcs.Package, + Declarations: allDecls, + FileSet: srcs.FileSet, + Minified: minify, + GoLinknames: srcs.GoLinknames, + }, nil +} - // variables - var varDecls []*Decl - varsWithInit := make(map[*types.Var]bool) - for _, init := range funcCtx.pkgCtx.InitOrder { - for _, o := range init.Lhs { - varsWithInit[o] = true - } - } - for _, o := range vars { - var d Decl - if !o.Exported() { - d.Vars = []string{funcCtx.objectName(o)} - } - if funcCtx.pkgCtx.HasPointer[o] && !o.Exported() { - d.Vars = append(d.Vars, funcCtx.varPtrName(o)) - } - if _, ok := varsWithInit[o]; !ok { - d.DceDeps = collectDependencies(func() { - d.InitCode = []byte(fmt.Sprintf("\t\t%s = %s;\n", funcCtx.objectName(o), funcCtx.translateExpr(funcCtx.zeroValue(o.Type())).String())) - }) - } - d.DceObjectFilter = o.Name() - varDecls = append(varDecls, &d) - } - for _, init := range funcCtx.pkgCtx.InitOrder { - lhs := make([]ast.Expr, len(init.Lhs)) - for i, o := range init.Lhs { - ident := ast.NewIdent(o.Name()) - ident.NamePos = o.Pos() - funcCtx.pkgCtx.Defs[ident] = o - lhs[i] = funcCtx.setType(ident, o.Type()) - varsWithInit[o] = true - } - var d Decl - d.DceDeps = collectDependencies(func() { - funcCtx.localVars = nil - d.InitCode = funcCtx.CatchOutput(1, func() { - funcCtx.translateStmt(&ast.AssignStmt{ - Lhs: lhs, - Tok: token.DEFINE, - Rhs: []ast.Expr{init.Rhs}, - }, nil) - }) - d.Vars = append(d.Vars, funcCtx.localVars...) - }) - if len(init.Lhs) == 1 { - if !analysis.HasSideEffect(init.Rhs, funcCtx.pkgCtx.Info.Info) { - d.DceObjectFilter = init.Lhs[0].Name() - } - } - varDecls = append(varDecls, &d) +// PrepareAllSources prepares all sources for compilation by +// parsing go linknames, type checking, sorting, simplifying, and +// performing cross package analysis. +// The results are stored in the provided sources. +// +// All sources must be given at the same time for cross package analysis to +// work correctly. For consistency, the sources should be sorted by import path. +func PrepareAllSources(allSources []*sources.Sources, importer sources.Importer, tContext *types.Context) error { + // Sort the files by name in each source to ensure consistent order of processing. 
+ for _, srcs := range allSources { + srcs.Sort() } - // functions - var funcDecls []*Decl - var mainFunc *types.Func - for _, fun := range functions { - o := funcCtx.pkgCtx.Defs[fun.Name].(*types.Func) - - if fun.Type.TypeParams.NumFields() > 0 { - return nil, scanner.Error{ - Pos: fileSet.Position(fun.Type.TypeParams.Pos()), - Msg: fmt.Sprintf("function %s: type parameters are not supported by GopherJS: https://github.com/gopherjs/gopherjs/issues/1013", o.Name()), - } - } - funcInfo := funcCtx.pkgCtx.FuncDeclInfos[o] - d := Decl{ - FullName: o.FullName(), - Blocking: len(funcInfo.Blocking) != 0, - } - d.LinkingName = newSymName(o) - if fun.Recv == nil { - d.Vars = []string{funcCtx.objectName(o)} - d.DceObjectFilter = o.Name() - switch o.Name() { - case "main": - mainFunc = o - d.DceObjectFilter = "" - case "init": - d.InitCode = funcCtx.CatchOutput(1, func() { - id := funcCtx.newIdent("", types.NewSignatureType(nil, nil, nil, nil, nil, false)) - funcCtx.pkgCtx.Uses[id] = o - call := &ast.CallExpr{Fun: id} - if len(funcCtx.pkgCtx.FuncDeclInfos[o].Blocking) != 0 { - funcCtx.Blocking[call] = true - } - funcCtx.translateStmt(&ast.ExprStmt{X: call}, nil) - }) - d.DceObjectFilter = "" - } - } else { - recvType := o.Type().(*types.Signature).Recv().Type() - ptr, isPointer := recvType.(*types.Pointer) - namedRecvType, _ := recvType.(*types.Named) - if isPointer { - namedRecvType = ptr.Elem().(*types.Named) - } - if namedRecvType.TypeParams() != nil { - return nil, scanner.Error{ - Pos: fileSet.Position(o.Pos()), - Msg: fmt.Sprintf("type %s: type parameters are not supported by GopherJS: https://github.com/gopherjs/gopherjs/issues/1013", o.FullName()), - } - } - name := funcCtx.objectName(namedRecvType.Obj()) - d.NamedRecvType = name - d.DceObjectFilter = namedRecvType.Obj().Name() - if !fun.Name.IsExported() { - d.DceMethodFilter = o.Name() + "~" - } + // This will be performed recursively for all dependencies + // to get the packages for the sources. + // Since some packages might not be recursively reached via the root sources, + // e.g. runtime, we need to try to TypeCheck all of them here. + // Any sources that have already been type checked will no-op. 
+ for _, srcs := range allSources { + if err := srcs.TypeCheck(importer, sizes32, tContext); err != nil { + return err } - - d.DceDeps = collectDependencies(func() { - d.DeclCode = funcCtx.translateToplevelFunction(fun, funcInfo) - }) - funcDecls = append(funcDecls, &d) - } - if typesPkg.Name() == "main" { - if mainFunc == nil { - return nil, fmt.Errorf("missing main function") - } - id := funcCtx.newIdent("", types.NewSignatureType(nil, nil, nil, nil, nil, false)) - funcCtx.pkgCtx.Uses[id] = mainFunc - call := &ast.CallExpr{Fun: id} - ifStmt := &ast.IfStmt{ - Cond: funcCtx.newIdent("$pkg === $mainPkg", types.Typ[types.Bool]), - Body: &ast.BlockStmt{ - List: []ast.Stmt{ - &ast.ExprStmt{X: call}, - &ast.AssignStmt{ - Lhs: []ast.Expr{funcCtx.newIdent("$mainFinished", types.Typ[types.Bool])}, - Tok: token.ASSIGN, - Rhs: []ast.Expr{funcCtx.newConst(types.Typ[types.Bool], constant.MakeBool(true))}, - }, - }, - }, - } - if len(funcCtx.pkgCtx.FuncDeclInfos[mainFunc].Blocking) != 0 { - funcCtx.Blocking[call] = true - funcCtx.Flattened[ifStmt] = true - } - funcDecls = append(funcDecls, &Decl{ - InitCode: funcCtx.CatchOutput(1, func() { - funcCtx.translateStmt(ifStmt, nil) - }), - }) } - // named types - var typeDecls []*Decl - for _, o := range funcCtx.pkgCtx.typeNames { - if o.IsAlias() { - continue - } - typeName := funcCtx.objectName(o) - - if named, ok := o.Type().(*types.Named); ok && named.TypeParams().Len() > 0 { - return nil, scanner.Error{ - Pos: fileSet.Position(o.Pos()), - Msg: fmt.Sprintf("type %s: type parameters are not supported by GopherJS: https://github.com/gopherjs/gopherjs/issues/1013", o.Name()), - } - } - - d := Decl{ - Vars: []string{typeName}, - DceObjectFilter: o.Name(), + // Extract all go:linkname compiler directives from the package source. + for _, srcs := range allSources { + if err := srcs.ParseGoLinknames(); err != nil { + return err } - d.DceDeps = collectDependencies(func() { - d.DeclCode = funcCtx.CatchOutput(0, func() { - typeName := funcCtx.objectName(o) - lhs := typeName - if isPkgLevel(o) { - lhs += " = $pkg." + encodeIdent(o.Name()) - } - size := int64(0) - constructor := "null" - switch t := o.Type().Underlying().(type) { - case *types.Struct: - params := make([]string, t.NumFields()) - for i := 0; i < t.NumFields(); i++ { - params[i] = fieldName(t, i) + "_" - } - constructor = fmt.Sprintf("function(%s) {\n\t\tthis.$val = this;\n\t\tif (arguments.length === 0) {\n", strings.Join(params, ", ")) - for i := 0; i < t.NumFields(); i++ { - constructor += fmt.Sprintf("\t\t\tthis.%s = %s;\n", fieldName(t, i), funcCtx.translateExpr(funcCtx.zeroValue(t.Field(i).Type())).String()) - } - constructor += "\t\t\treturn;\n\t\t}\n" - for i := 0; i < t.NumFields(); i++ { - constructor += fmt.Sprintf("\t\tthis.%[1]s = %[1]s_;\n", fieldName(t, i)) - } - constructor += "\t}" - case *types.Basic, *types.Array, *types.Slice, *types.Chan, *types.Signature, *types.Interface, *types.Pointer, *types.Map: - size = sizes32.Sizeof(t) - } - if tPointer, ok := o.Type().Underlying().(*types.Pointer); ok { - if _, ok := tPointer.Elem().Underlying().(*types.Array); ok { - // Array pointers have non-default constructors to support wrapping - // of the native objects. 
- constructor = "$arrayPtrCtor()" - } - } - funcCtx.Printf(`%s = $newType(%d, %s, "%s.%s", %t, "%s", %t, %s);`, lhs, size, typeKind(o.Type()), o.Pkg().Name(), o.Name(), o.Name() != "", o.Pkg().Path(), o.Exported(), constructor) - }) - d.MethodListCode = funcCtx.CatchOutput(0, func() { - named := o.Type().(*types.Named) - if _, ok := named.Underlying().(*types.Interface); ok { - return - } - var methods []string - var ptrMethods []string - for i := 0; i < named.NumMethods(); i++ { - method := named.Method(i) - name := method.Name() - if reservedKeywords[name] { - name += "$" - } - pkgPath := "" - if !method.Exported() { - pkgPath = method.Pkg().Path() - } - t := method.Type().(*types.Signature) - entry := fmt.Sprintf(`{prop: "%s", name: %s, pkg: "%s", typ: $funcType(%s)}`, name, encodeString(method.Name()), pkgPath, funcCtx.initArgs(t)) - if _, isPtr := t.Recv().Type().(*types.Pointer); isPtr { - ptrMethods = append(ptrMethods, entry) - continue - } - methods = append(methods, entry) - } - if len(methods) > 0 { - funcCtx.Printf("%s.methods = [%s];", funcCtx.typeName(named), strings.Join(methods, ", ")) - } - if len(ptrMethods) > 0 { - funcCtx.Printf("%s.methods = [%s];", funcCtx.typeName(types.NewPointer(named)), strings.Join(ptrMethods, ", ")) - } - }) - switch t := o.Type().Underlying().(type) { - case *types.Array, *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Slice, *types.Signature, *types.Struct: - d.TypeInitCode = funcCtx.CatchOutput(0, func() { - funcCtx.Printf("%s.init(%s);", funcCtx.objectName(o), funcCtx.initArgs(t)) - }) - } - }) - typeDecls = append(typeDecls, &d) } - // anonymous types - for _, t := range funcCtx.pkgCtx.anonTypes { - d := Decl{ - Vars: []string{t.Name()}, - DceObjectFilter: t.Name(), - } - d.DceDeps = collectDependencies(func() { - d.DeclCode = []byte(fmt.Sprintf("\t%s = $%sType(%s);\n", t.Name(), strings.ToLower(typeKind(t.Type())[5:]), funcCtx.initArgs(t.Type()))) - }) - typeDecls = append(typeDecls, &d) + // Simply the source files. + for _, srcs := range allSources { + srcs.Simplify() } - var allDecls []*Decl - for _, d := range append(append(append(importDecls, typeDecls...), varDecls...), funcDecls...) { - d.DeclCode = removeWhitespace(d.DeclCode, minify) - d.MethodListCode = removeWhitespace(d.MethodListCode, minify) - d.TypeInitCode = removeWhitespace(d.TypeInitCode, minify) - d.InitCode = removeWhitespace(d.InitCode, minify) - allDecls = append(allDecls, d) + // Collect all the generic type instances from all the packages. + // This must be done for all sources prior to any analysis. + instances := &typeparams.PackageInstanceSets{} + for _, srcs := range allSources { + srcs.CollectInstances(tContext, instances) } - if len(funcCtx.pkgCtx.errList) != 0 { - return nil, funcCtx.pkgCtx.errList + // Analyze the package to determine type parameters instances, blocking, + // and other type information. This will not populate the information. + for _, srcs := range allSources { + srcs.Analyze(importer, tContext, instances) } - return &Archive{ - ImportPath: importPath, - Name: typesPkg.Name(), - Imports: importedPaths, - ExportData: exportData.Bytes(), - Declarations: allDecls, - FileSet: encodedFileSet.Bytes(), - Minified: minify, - GoLinknames: goLinknames, - BuildTime: time.Now(), - }, nil + // Propagate the analysis information across all packages. 
+ allInfo := make([]*analysis.Info, len(allSources)) + for i, src := range allSources { + allInfo[i] = src.TypeInfo + } + analysis.PropagateAnalysis(allInfo) + return nil } func (fc *funcContext) initArgs(ty types.Type) string { @@ -658,232 +323,20 @@ func (fc *funcContext) initArgs(ty types.Type) string { if !field.Exported() { pkgPath = field.Pkg().Path() } - fields[i] = fmt.Sprintf(`{prop: "%s", name: %s, embedded: %t, exported: %t, typ: %s, tag: %s}`, fieldName(t, i), encodeString(field.Name()), field.Anonymous(), field.Exported(), fc.typeName(field.Type()), encodeString(t.Tag(i))) + ft := fc.fieldType(t, i) + fields[i] = fmt.Sprintf(`{prop: "%s", name: %s, embedded: %t, exported: %t, typ: %s, tag: %s}`, + fieldName(t, i), encodeString(field.Name()), field.Anonymous(), field.Exported(), fc.typeName(ft), encodeString(t.Tag(i))) } return fmt.Sprintf(`"%s", [%s]`, pkgPath, strings.Join(fields, ", ")) case *types.TypeParam: - err := bailout(fmt.Errorf(`%v has unexpected generic type parameter %T`, ty, ty)) + tr := fc.typeResolver.Substitute(ty) + if tr != ty { + return fc.initArgs(tr) + } + err := bailout(fmt.Errorf(`"%v" has unexpected generic type parameter %T`, ty, ty)) panic(err) default: err := bailout(fmt.Errorf("%v has unexpected type %T", ty, ty)) panic(err) } } - -func (fc *funcContext) translateToplevelFunction(fun *ast.FuncDecl, info *analysis.FuncInfo) []byte { - o := fc.pkgCtx.Defs[fun.Name].(*types.Func) - sig := o.Type().(*types.Signature) - var recv *ast.Ident - if fun.Recv != nil && fun.Recv.List[0].Names != nil { - recv = fun.Recv.List[0].Names[0] - } - - var joinedParams string - primaryFunction := func(funcRef string) []byte { - if fun.Body == nil { - return []byte(fmt.Sprintf("\t%s = function() {\n\t\t$throwRuntimeError(\"native function not implemented: %s\");\n\t};\n", funcRef, o.FullName())) - } - - params, fun := translateFunction(fun.Type, recv, fun.Body, fc, sig, info, funcRef) - joinedParams = strings.Join(params, ", ") - return []byte(fmt.Sprintf("\t%s = %s;\n", funcRef, fun)) - } - - code := bytes.NewBuffer(nil) - - if fun.Recv == nil { - funcRef := fc.objectName(o) - code.Write(primaryFunction(funcRef)) - if fun.Name.IsExported() { - fmt.Fprintf(code, "\t$pkg.%s = %s;\n", encodeIdent(fun.Name.Name), funcRef) - } - return code.Bytes() - } - - recvType := sig.Recv().Type() - ptr, isPointer := recvType.(*types.Pointer) - namedRecvType, _ := recvType.(*types.Named) - if isPointer { - namedRecvType = ptr.Elem().(*types.Named) - } - typeName := fc.objectName(namedRecvType.Obj()) - funName := fun.Name.Name - if reservedKeywords[funName] { - funName += "$" - } - - if _, isStruct := namedRecvType.Underlying().(*types.Struct); isStruct { - code.Write(primaryFunction(typeName + ".ptr.prototype." + funName)) - fmt.Fprintf(code, "\t%s.prototype.%s = function(%s) { return this.$val.%s(%s); };\n", typeName, funName, joinedParams, funName, joinedParams) - return code.Bytes() - } - - if isPointer { - if _, isArray := ptr.Elem().Underlying().(*types.Array); isArray { - code.Write(primaryFunction(typeName + ".prototype." 
+ funName)) - fmt.Fprintf(code, "\t$ptrType(%s).prototype.%s = function(%s) { return (new %s(this.$get())).%s(%s); };\n", typeName, funName, joinedParams, typeName, funName, joinedParams) - return code.Bytes() - } - return primaryFunction(fmt.Sprintf("$ptrType(%s).prototype.%s", typeName, funName)) - } - - value := "this.$get()" - if isWrapped(recvType) { - value = fmt.Sprintf("new %s(%s)", typeName, value) - } - code.Write(primaryFunction(typeName + ".prototype." + funName)) - fmt.Fprintf(code, "\t$ptrType(%s).prototype.%s = function(%s) { return %s.%s(%s); };\n", typeName, funName, joinedParams, value, funName, joinedParams) - return code.Bytes() -} - -func translateFunction(typ *ast.FuncType, recv *ast.Ident, body *ast.BlockStmt, outerContext *funcContext, sig *types.Signature, info *analysis.FuncInfo, funcRef string) ([]string, string) { - if info == nil { - panic("nil info") - } - - c := &funcContext{ - FuncInfo: info, - pkgCtx: outerContext.pkgCtx, - parent: outerContext, - sig: sig, - allVars: make(map[string]int, len(outerContext.allVars)), - localVars: []string{}, - flowDatas: map[*types.Label]*flowData{nil: {}}, - caseCounter: 1, - labelCases: make(map[*types.Label]int), - } - for k, v := range outerContext.allVars { - c.allVars[k] = v - } - prevEV := c.pkgCtx.escapingVars - - var params []string - for _, param := range typ.Params.List { - if len(param.Names) == 0 { - params = append(params, c.newVariable("param")) - continue - } - for _, ident := range param.Names { - if isBlank(ident) { - params = append(params, c.newVariable("param")) - continue - } - params = append(params, c.objectName(c.pkgCtx.Defs[ident])) - } - } - - bodyOutput := string(c.CatchOutput(1, func() { - if len(c.Blocking) != 0 { - c.pkgCtx.Scopes[body] = c.pkgCtx.Scopes[typ] - c.handleEscapingVars(body) - } - - if c.sig != nil && c.sig.Results().Len() != 0 && c.sig.Results().At(0).Name() != "" { - c.resultNames = make([]ast.Expr, c.sig.Results().Len()) - for i := 0; i < c.sig.Results().Len(); i++ { - result := c.sig.Results().At(i) - c.Printf("%s = %s;", c.objectName(result), c.translateExpr(c.zeroValue(result.Type())).String()) - id := ast.NewIdent("") - c.pkgCtx.Uses[id] = result - c.resultNames[i] = c.setType(id, result.Type()) - } - } - - if recv != nil && !isBlank(recv) { - this := "this" - if isWrapped(c.pkgCtx.TypeOf(recv)) { - this = "this.$val" // Unwrap receiver value. - } - c.Printf("%s = %s;", c.translateExpr(recv), this) - } - - c.translateStmtList(body.List) - if len(c.Flattened) != 0 && !astutil.EndsWithReturn(body.List) { - c.translateStmt(&ast.ReturnStmt{}, nil) - } - })) - - sort.Strings(c.localVars) - - var prefix, suffix, functionName string - - if len(c.Flattened) != 0 { - c.localVars = append(c.localVars, "$s") - prefix = prefix + " $s = $s || 0;" - } - - if c.HasDefer { - c.localVars = append(c.localVars, "$deferred") - suffix = " }" + suffix - if len(c.Blocking) != 0 { - suffix = " }" + suffix - } - } - - localVarDefs := "" // Function-local var declaration at the top. - - if len(c.Blocking) != 0 { - if funcRef == "" { - funcRef = "$b" - functionName = " $b" - } - - localVars := append([]string{}, c.localVars...) - // There are several special variables involved in handling blocking functions: - // $r is sometimes used as a temporary variable to store blocking call result. - // $c indicates that a function is being resumed after a blocking call when set to true. - // $f is an object used to save and restore function context for blocking calls. 
- localVars = append(localVars, "$r") - // If a blocking function is being resumed, initialize local variables from the saved context. - localVarDefs = fmt.Sprintf("var {%s, $c} = $restore(this, {%s});\n", strings.Join(localVars, ", "), strings.Join(params, ", ")) - // If the function gets blocked, save local variables for future. - saveContext := fmt.Sprintf("var $f = {$blk: "+funcRef+", $c: true, $r, %s};", strings.Join(c.localVars, ", ")) - - suffix = " " + saveContext + "return $f;" + suffix - } else if len(c.localVars) > 0 { - // Non-blocking functions simply declare local variables with no need for restore support. - localVarDefs = fmt.Sprintf("var %s;\n", strings.Join(c.localVars, ", ")) - } - - if c.HasDefer { - prefix = prefix + " var $err = null; try {" - deferSuffix := " } catch(err) { $err = err;" - if len(c.Blocking) != 0 { - deferSuffix += " $s = -1;" - } - if c.resultNames == nil && c.sig.Results().Len() > 0 { - deferSuffix += fmt.Sprintf(" return%s;", c.translateResults(nil)) - } - deferSuffix += " } finally { $callDeferred($deferred, $err);" - if c.resultNames != nil { - deferSuffix += fmt.Sprintf(" if (!$curGoroutine.asleep) { return %s; }", c.translateResults(c.resultNames)) - } - if len(c.Blocking) != 0 { - deferSuffix += " if($curGoroutine.asleep) {" - } - suffix = deferSuffix + suffix - } - - if len(c.Flattened) != 0 { - prefix = prefix + " s: while (true) { switch ($s) { case 0:" - suffix = " } return; }" + suffix - } - - if c.HasDefer { - prefix = prefix + " $deferred = []; $curGoroutine.deferStack.push($deferred);" - } - - if prefix != "" { - bodyOutput = strings.Repeat("\t", c.pkgCtx.indentation+1) + "/* */" + prefix + "\n" + bodyOutput - } - if suffix != "" { - bodyOutput = bodyOutput + strings.Repeat("\t", c.pkgCtx.indentation+1) + "/* */" + suffix + "\n" - } - if localVarDefs != "" { - bodyOutput = strings.Repeat("\t", c.pkgCtx.indentation+1) + localVarDefs + bodyOutput - } - - c.pkgCtx.escapingVars = prevEV - - return params, fmt.Sprintf("function%s(%s) {\n%s%s}", functionName, strings.Join(params, ", "), bodyOutput, strings.Repeat("\t", c.pkgCtx.indentation)) -} diff --git a/compiler/prelude/jsmapping.js b/compiler/prelude/jsmapping.js index b22454bc3..f5317d626 100644 --- a/compiler/prelude/jsmapping.js +++ b/compiler/prelude/jsmapping.js @@ -236,6 +236,9 @@ var $internalize = (v, t, recv, seen, makeWrapper) => { case $kindFloat64: return parseFloat(v); case $kindArray: + if (v === null || v === undefined) { + $throwRuntimeError("cannot internalize "+v+" as a "+t.string); + } if (v.length !== t.len) { $throwRuntimeError("got array with wrong size from JavaScript native"); } @@ -331,6 +334,9 @@ var $internalize = (v, t, recv, seen, makeWrapper) => { return $internalize(v, t.elem, makeWrapper); } case $kindSlice: + if (v == null) { + return t.zero(); + } return new t($mapArray(v, e => { return $internalize(e, t.elem, makeWrapper); })); case $kindString: v = String(v); diff --git a/compiler/prelude/types.js b/compiler/prelude/types.js index 61475454e..9570b2fed 100644 --- a/compiler/prelude/types.js +++ b/compiler/prelude/types.js @@ -59,7 +59,7 @@ var $idKey = x => { }; // Creates constructor functions for array pointer types. Returns a new function -// instace each time to make sure each type is independent of the other. +// instance each time to make sure each type is independent of the other. 
var $arrayPtrCtor = () => { return function (array) { this.$get = () => { return array; }; diff --git a/compiler/sources/sources.go b/compiler/sources/sources.go new file mode 100644 index 000000000..8e2d12946 --- /dev/null +++ b/compiler/sources/sources.go @@ -0,0 +1,284 @@ +package sources + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "sort" + "strings" + + "github.com/neelance/astrewrite" + + "github.com/gopherjs/gopherjs/compiler/internal/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" + "github.com/gopherjs/gopherjs/compiler/jsFile" + "github.com/gopherjs/gopherjs/compiler/linkname" + "github.com/gopherjs/gopherjs/internal/errorList" + "github.com/gopherjs/gopherjs/internal/experiments" +) + +// Sources is a slice of parsed Go sources and additional data for a package. +// +// Note that the sources would normally belong to a single logical Go package, +// but they don't have to be a real Go package (i.e. found on the file system) +// or represent a complete package (i.e. it could be only a few source files +// compiled by `gopherjs build foo.go bar.go`). +type Sources struct { + // ImportPath representing the sources, if exists. + // + // May be empty for "virtual" + // packages like testmain or playground-generated package. + // Otherwise this must be the absolute import path for a package. + ImportPath string + + // Dir is the directory containing package sources + Dir string + + // Files is the parsed and augmented Go AST files for the package. + Files []*ast.File + + // FileSet is the file set for the parsed files. + FileSet *token.FileSet + + // JSFiles is the JavaScript files that are part of the package. + JSFiles []jsFile.JSFile + + // TypeInfo is the type information this package. + // This is nil until set by Analyze. + TypeInfo *analysis.Info + + // baseInfo is the base type information this package. + // This is nil until set by TypeCheck. + baseInfo *types.Info + + // Package is the types package for these source files. + // This is nil until set by TypeCheck. + Package *types.Package + + // GoLinknames is the set of Go linknames for this package. + // This is nil until set by ParseGoLinknames. + GoLinknames []linkname.GoLinkname +} + +type Importer func(path, srcDir string) (*Sources, error) + +// sort the Go files slice by the original source name to ensure consistent order +// of processing. This is required for reproducible JavaScript output. +// +// Note this function mutates the original Files slice. +func (s *Sources) Sort() { + sort.Slice(s.Files, func(i, j int) bool { + return s.getFileName(s.Files[i]) > s.getFileName(s.Files[j]) + }) +} + +func (s *Sources) getFileName(file *ast.File) string { + return s.FileSet.File(file.Pos()).Name() +} + +// Simplify processed each Files entry with astrewrite.Simplify. +// +// Note this function mutates the original Files slice. +// This must be called after TypeCheck and before analyze since +// this will change the pointers in the AST. For example, the pointers +// to function literals will change, making it impossible to find them +// in the type information, if analyze is called first. +func (s *Sources) Simplify() { + for i, file := range s.Files { + s.Files[i] = astrewrite.Simplify(file, s.baseInfo, false) + } +} + +// TypeCheck the sources. Returns information about declared package types and +// type information for the supplied AST. +// This will set the Package field on the Sources. +// +// If the Package field is not nil, e.g. 
this function has already been run, +// this will be a no-op. +// +// This must be called prior to simplify to get the types.Info used by simplify. +func (s *Sources) TypeCheck(importer Importer, sizes types.Sizes, tContext *types.Context) error { + if s.Package != nil && s.baseInfo != nil { + // type checking has already been done so return early. + return nil + } + + const errLimit = 10 // Max number of type checking errors to return. + + typesInfo := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Scopes: make(map[ast.Node]*types.Scope), + Instances: make(map[*ast.Ident]types.Instance), + } + + var typeErrs errorList.ErrorList + + pkgImporter := &packageImporter{ + srcDir: s.Dir, + importer: importer, + sizes: sizes, + tContext: tContext, + } + + config := &types.Config{ + Context: tContext, + Importer: pkgImporter, + Sizes: sizes, + Error: func(err error) { typeErrs = typeErrs.AppendDistinct(err) }, + } + typesPkg, err := config.Check(s.ImportPath, s.FileSet, s.Files, typesInfo) + // If we encountered any import errors, it is likely that the other type errors + // are not meaningful and would be resolved by fixing imports. Return them + // separately, if any. https://github.com/gopherjs/gopherjs/issues/119. + if pkgImporter.Errors.ErrOrNil() != nil { + return pkgImporter.Errors.Trim(errLimit).ErrOrNil() + } + // Return any other type errors. + if typeErrs.ErrOrNil() != nil { + return typeErrs.Trim(errLimit).ErrOrNil() + } + // Any general errors that may have occurred during type checking. + if err != nil { + return err + } + + // If generics are not enabled, ensure the package does not requires generics support. + if !experiments.Env.Generics { + if genErr := typeparams.RequiresGenericsSupport(typesInfo); genErr != nil { + return fmt.Errorf("some packages requires generics support (https://github.com/gopherjs/gopherjs/issues/1013): %w", genErr) + } + } + + s.baseInfo = typesInfo + s.Package = typesPkg + return nil +} + +// CollectInstances will determine the type parameters instances for the package. +// +// This must be called before Analyze to have the type parameters instances +// needed during analysis. +func (s *Sources) CollectInstances(tContext *types.Context, instances *typeparams.PackageInstanceSets) { + tc := typeparams.Collector{ + TContext: tContext, + Info: s.baseInfo, + Instances: instances, + } + tc.Scan(s.Package, s.Files...) +} + +// Analyze will determine the type parameters instances, blocking, +// and other type information for the package. +// This will set the TypeInfo and Instances fields on the Sources. +// +// This must be called after to simplify to ensure the pointers +// in the AST are still valid. +// The instances must be collected prior to this call. +// +// Note that at the end of this call the analysis information +// has NOT been propagated across packages yet. +func (s *Sources) Analyze(importer Importer, tContext *types.Context, instances *typeparams.PackageInstanceSets) { + infoImporter := func(path string) (*analysis.Info, error) { + srcs, err := importer(path, s.Dir) + if err != nil { + return nil, err + } + return srcs.TypeInfo, nil + } + s.TypeInfo = analysis.AnalyzePkg(s.Files, s.FileSet, s.baseInfo, tContext, s.Package, instances, infoImporter) +} + +// ParseGoLinknames extracts all //go:linkname compiler directive from the sources. 
+// +// This will set the GoLinknames field on the Sources. +func (s *Sources) ParseGoLinknames() error { + goLinknames := []linkname.GoLinkname{} + var errs errorList.ErrorList + for _, file := range s.Files { + found, err := linkname.ParseGoLinknames(s.FileSet, s.ImportPath, file) + errs = errs.Append(err) + goLinknames = append(goLinknames, found...) + } + if err := errs.ErrOrNil(); err != nil { + return err + } + s.GoLinknames = goLinknames + return nil +} + +// UnresolvedImports calculates the import paths of the package's dependencies +// based on all the imports in the augmented Go AST files. +// +// This is used to determine the unresolved imports that weren't in the +// PackageData.Imports slice since they were added during augmentation or +// during template generation. +// +// The given skip paths (typically those imports from PackageData.Imports) +// will not be returned in the results. +// This will not return any `*_test` packages in the results. +func (s *Sources) UnresolvedImports(skip ...string) []string { + seen := make(map[string]struct{}) + for _, sk := range skip { + seen[sk] = struct{}{} + } + imports := []string{} + for _, file := range s.Files { + for _, imp := range file.Imports { + path := strings.Trim(imp.Path.Value, `"`) + if _, ok := seen[path]; !ok { + if !strings.HasSuffix(path, "_test") { + imports = append(imports, path) + } + seen[path] = struct{}{} + } + } + } + sort.Strings(imports) + return imports +} + +// packageImporter implements go/types.Importer interface and +// wraps it to collect import errors. +type packageImporter struct { + srcDir string + importer Importer + sizes types.Sizes + tContext *types.Context + Errors errorList.ErrorList +} + +func (pi *packageImporter) Import(path string) (*types.Package, error) { + if path == "unsafe" { + return types.Unsafe, nil + } + + srcs, err := pi.importer(path, pi.srcDir) + if err != nil { + pi.Errors = pi.Errors.AppendDistinct(err) + return nil, err + } + + // If the sources doesn't have the package determined yet, get it now, + // otherwise this will be a no-op. + // This will recursively get the packages for all of it's dependencies too. + err = srcs.TypeCheck(pi.importer, pi.sizes, pi.tContext) + if err != nil { + pi.Errors = pi.Errors.AppendDistinct(err) + return nil, err + } + + return srcs.Package, nil +} + +// SortedSourcesSlice in place sorts the given slice of Sources by ImportPath. +// This will not change the order of the files within any Sources. 
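// Editorial illustration only (not part of this patch): a minimal driver sketch
// showing the intended ordering of the Sources pipeline described above:
// TypeCheck, then Simplify, then CollectInstances, then Analyze, and finally
// cross-package propagation. The function name prepareAllSources and the idea
// of receiving a ready-made allSources slice are assumptions for the example.
package driver

import (
	"go/types"

	"github.com/gopherjs/gopherjs/compiler/internal/analysis"
	"github.com/gopherjs/gopherjs/compiler/internal/typeparams"
	"github.com/gopherjs/gopherjs/compiler/sources"
)

func prepareAllSources(allSources []*sources.Sources, importer sources.Importer, sizes types.Sizes, tContext *types.Context) error {
	// Type check first so every Sources has its Package and base type info.
	for _, srcs := range allSources {
		if err := srcs.TypeCheck(importer, sizes, tContext); err != nil {
			return err
		}
	}
	// Simplify rewrites AST nodes, so it must run after TypeCheck and before Analyze.
	for _, srcs := range allSources {
		srcs.Simplify()
	}
	// Collect generic type instances for all packages before any analysis happens.
	instances := &typeparams.PackageInstanceSets{}
	for _, srcs := range allSources {
		srcs.CollectInstances(tContext, instances)
	}
	// Per-package analysis, then propagate the results across package boundaries.
	allInfo := make([]*analysis.Info, len(allSources))
	for i, srcs := range allSources {
		srcs.Analyze(importer, tContext, instances)
		allInfo[i] = srcs.TypeInfo
	}
	analysis.PropagateAnalysis(allInfo)
	return nil
}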
+func SortedSourcesSlice(sourcesSlice []*Sources) { + sort.Slice(sourcesSlice, func(i, j int) bool { + return sourcesSlice[i].ImportPath < sourcesSlice[j].ImportPath + }) +} diff --git a/compiler/statements.go b/compiler/statements.go index 8518f9b71..17ed8b746 100644 --- a/compiler/statements.go +++ b/compiler/statements.go @@ -9,9 +9,9 @@ import ( "go/types" "strings" - "github.com/gopherjs/gopherjs/compiler/analysis" "github.com/gopherjs/gopherjs/compiler/astutil" "github.com/gopherjs/gopherjs/compiler/filter" + "github.com/gopherjs/gopherjs/compiler/internal/analysis" "github.com/gopherjs/gopherjs/compiler/typesutil" ) @@ -100,7 +100,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { data.endCase = fc.caseCounter fc.caseCounter++ - fc.Indent(func() { + fc.Indented(func() { fc.translateStmtList(clause.Body) }) fc.Printf("case %d:", data.endCase) @@ -112,7 +112,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { fc.Printf("%s:", label.Name()) } fc.Printf("switch (0) { default:") - fc.Indent(func() { + fc.Indented(func() { fc.translateStmtList(clause.Body) }) fc.Printf("}") @@ -125,7 +125,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { if s.Init != nil { fc.translateStmt(s.Init, nil) } - refVar := fc.newVariable("_ref") + refVar := fc.newLocalVariable("_ref") var expr ast.Expr switch a := s.Assign.(type) { case *ast.AssignStmt: @@ -135,10 +135,10 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { } fc.Printf("%s = %s;", refVar, fc.translateExpr(expr)) translateCond := func(cond ast.Expr) *expression { - if types.Identical(fc.pkgCtx.TypeOf(cond), types.Typ[types.UntypedNil]) { + if types.Identical(fc.typeOf(cond), types.Typ[types.UntypedNil]) { return fc.formatExpr("%s === $ifaceNil", refVar) } - return fc.formatExpr("$assertType(%s, %s, true)[1]", refVar, fc.typeName(fc.pkgCtx.TypeOf(cond))) + return fc.formatExpr("$assertType(%s, %s, true)[1]", refVar, fc.typeName(fc.typeOf(cond))) } var caseClauses []*ast.CaseClause var defaultClause *ast.CaseClause @@ -146,16 +146,17 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { clause := cc.(*ast.CaseClause) var bodyPrefix []ast.Stmt if implicit := fc.pkgCtx.Implicits[clause]; implicit != nil { + typ := fc.typeResolver.Substitute(implicit.Type()) value := refVar - if typesutil.IsJsObject(implicit.Type().Underlying()) { + if typesutil.IsJsObject(typ.Underlying()) { value += ".$val.object" - } else if _, ok := implicit.Type().Underlying().(*types.Interface); !ok { + } else if _, ok := typ.Underlying().(*types.Interface); !ok { value += ".$val" } bodyPrefix = []ast.Stmt{&ast.AssignStmt{ - Lhs: []ast.Expr{fc.newIdent(fc.objectName(implicit), implicit.Type())}, + Lhs: []ast.Expr{fc.newIdent(fc.objectName(implicit), typ)}, Tok: token.DEFINE, - Rhs: []ast.Expr{fc.newIdent(value, implicit.Type())}, + Rhs: []ast.Expr{fc.newIdent(value, typ)}, }} } c := &ast.CaseClause{ @@ -187,14 +188,14 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { }, label, fc.Flattened[s]) case *ast.RangeStmt: - refVar := fc.newVariable("_ref") + refVar := fc.newLocalVariable("_ref") fc.Printf("%s = %s;", refVar, fc.translateExpr(s.X)) - switch t := fc.pkgCtx.TypeOf(s.X).Underlying().(type) { + switch t := fc.typeOf(s.X).Underlying().(type) { case *types.Basic: - iVar := fc.newVariable("_i") + iVar := fc.newLocalVariable("_i") fc.Printf("%s = 0;", iVar) - runeVar := fc.newVariable("_rune") + runeVar := 
fc.newLocalVariable("_rune") fc.translateLoopingStmt(func() string { return iVar + " < " + refVar + ".length" }, s.Body, func() { fc.Printf("%s = $decodeRune(%s, %s);", runeVar, refVar, iVar) if !isBlank(s.Key) { @@ -208,16 +209,16 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { }, label, fc.Flattened[s]) case *types.Map: - iVar := fc.newVariable("_i") + iVar := fc.newLocalVariable("_i") fc.Printf("%s = 0;", iVar) - keysVar := fc.newVariable("_keys") + keysVar := fc.newLocalVariable("_keys") fc.Printf("%s = %s ? %s.keys() : undefined;", keysVar, refVar, refVar) - sizeVar := fc.newVariable("_size") + sizeVar := fc.newLocalVariable("_size") fc.Printf("%s = %s ? %s.size : 0;", sizeVar, refVar, refVar) fc.translateLoopingStmt(func() string { return iVar + " < " + sizeVar }, s.Body, func() { - keyVar := fc.newVariable("_key") - entryVar := fc.newVariable("_entry") + keyVar := fc.newLocalVariable("_key") + entryVar := fc.newLocalVariable("_entry") fc.Printf("%s = %s.next().value;", keyVar, keysVar) fc.Printf("%s = %s.get(%s);", entryVar, refVar, keyVar) fc.translateStmt(&ast.IfStmt{ @@ -248,7 +249,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { length = refVar + ".$length" elemType = t2.Elem() } - iVar := fc.newVariable("_i") + iVar := fc.newLocalVariable("_i") fc.Printf("%s = 0;", iVar) fc.translateLoopingStmt(func() string { return iVar + " < " + length }, s.Body, func() { if !isBlank(s.Key) { @@ -265,7 +266,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { }, label, fc.Flattened[s]) case *types.Chan: - okVar := fc.newIdent(fc.newVariable("_ok"), types.Typ[types.Bool]) + okVar := fc.newIdent(fc.newLocalVariable("_ok"), types.Typ[types.Bool]) key := s.Key tok := s.Tok if key == nil { @@ -354,7 +355,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { if rVal != "" { // If returned expression is non empty, evaluate and store it in a // variable to avoid double-execution in case a deferred function blocks. 
- rVar := fc.newVariable("$r") + rVar := fc.newLocalVariable("$r") fc.Printf("%s =%s;", rVar, rVal) rVal = " " + rVar } @@ -380,15 +381,15 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { case len(s.Lhs) == 1 && len(s.Rhs) == 1: lhs := astutil.RemoveParens(s.Lhs[0]) if isBlank(lhs) { - fc.Printf("$unused(%s);", fc.translateImplicitConversion(s.Rhs[0], fc.pkgCtx.TypeOf(s.Lhs[0]))) + fc.Printf("$unused(%s);", fc.translateImplicitConversion(s.Rhs[0], fc.typeOf(s.Lhs[0]))) return } fc.Printf("%s", fc.translateAssign(lhs, s.Rhs[0], s.Tok == token.DEFINE)) case len(s.Lhs) > 1 && len(s.Rhs) == 1: - tupleVar := fc.newVariable("_tuple") + tupleVar := fc.newLocalVariable("_tuple") fc.Printf("%s = %s;", tupleVar, fc.translateExpr(s.Rhs[0])) - tuple := fc.pkgCtx.TypeOf(s.Rhs[0]).(*types.Tuple) + tuple := fc.typeOf(s.Rhs[0]).(*types.Tuple) for i, lhs := range s.Lhs { lhs = astutil.RemoveParens(lhs) if !isBlank(lhs) { @@ -398,17 +399,17 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { case len(s.Lhs) == len(s.Rhs): tmpVars := make([]string, len(s.Rhs)) for i, rhs := range s.Rhs { - tmpVars[i] = fc.newVariable("_tmp") + tmpVars[i] = fc.newLocalVariable("_tmp") if isBlank(astutil.RemoveParens(s.Lhs[i])) { fc.Printf("$unused(%s);", fc.translateExpr(rhs)) continue } - fc.Printf("%s", fc.translateAssign(fc.newIdent(tmpVars[i], fc.pkgCtx.TypeOf(s.Lhs[i])), rhs, true)) + fc.Printf("%s", fc.translateAssign(fc.newIdent(tmpVars[i], fc.typeOf(s.Lhs[i])), rhs, true)) } for i, lhs := range s.Lhs { lhs = astutil.RemoveParens(lhs) if !isBlank(lhs) { - fc.Printf("%s", fc.translateAssign(lhs, fc.newIdent(tmpVars[i], fc.pkgCtx.TypeOf(lhs)), s.Tok == token.DEFINE)) + fc.Printf("%s", fc.translateAssign(lhs, fc.newIdent(tmpVars[i], fc.typeOf(lhs)), s.Tok == token.DEFINE)) } } @@ -431,7 +432,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { if len(rhs) == 0 { rhs = make([]ast.Expr, len(lhs)) for i, e := range lhs { - rhs[i] = fc.zeroValue(fc.pkgCtx.TypeOf(e)) + rhs[i] = fc.zeroValue(fc.typeOf(e)) } } fc.translateStmt(&ast.AssignStmt{ @@ -442,10 +443,10 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { } case token.TYPE: for _, spec := range decl.Specs { - o := fc.pkgCtx.Defs[spec.(*ast.TypeSpec).Name].(*types.TypeName) - fc.pkgCtx.typeNames = append(fc.pkgCtx.typeNames, o) - fc.pkgCtx.objectNames[o] = fc.newVariableWithLevel(o.Name(), true) - fc.pkgCtx.dependencies[o] = true + id := spec.(*ast.TypeSpec).Name + o := fc.pkgCtx.Defs[id].(*types.TypeName) + fc.pkgCtx.typeNames.Add(o) + fc.pkgCtx.DeclareDCEDep(o) } case token.CONST: // skip, constants are inlined @@ -469,7 +470,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { fc.Printf("$go(%s, %s);", callable, arglist) case *ast.SendStmt: - chanType := fc.pkgCtx.TypeOf(s.Chan).Underlying().(*types.Chan) + chanType := fc.typeOf(s.Chan).Underlying().(*types.Chan) call := &ast.CallExpr{ Fun: fc.newIdent("$send", types.NewSignatureType(nil, nil, nil, types.NewTuple(types.NewVar(0, nil, "", chanType), types.NewVar(0, nil, "", chanType.Elem())), nil, false)), Args: []ast.Expr{s.Chan, fc.newIdent(fc.translateImplicitConversionWithCloning(s.Value, chanType.Elem()).String(), chanType.Elem())}, @@ -478,7 +479,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { fc.translateStmt(&ast.ExprStmt{X: call}, label) case *ast.SelectStmt: - selectionVar := fc.newVariable("_selection") + selectionVar := 
fc.newLocalVariable("_selection") var channels []string var caseClauses []*ast.CaseClause flattened := false @@ -494,7 +495,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { case *ast.AssignStmt: channels = append(channels, fc.formatExpr("[%e]", astutil.RemoveParens(comm.Rhs[0]).(*ast.UnaryExpr).X).String()) case *ast.SendStmt: - chanType := fc.pkgCtx.TypeOf(comm.Chan).Underlying().(*types.Chan) + chanType := fc.typeOf(comm.Chan).Underlying().(*types.Chan) channels = append(channels, fc.formatExpr("[%e, %s]", comm.Chan, fc.translateImplicitConversionWithCloning(comm.Value, chanType.Elem())).String()) default: panic(fmt.Sprintf("unhandled: %T", comm)) @@ -505,7 +506,7 @@ func (fc *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) { var bodyPrefix []ast.Stmt if assign, ok := clause.Comm.(*ast.AssignStmt); ok { - switch rhsType := fc.pkgCtx.TypeOf(assign.Rhs[0]).(type) { + switch rhsType := fc.typeOf(assign.Rhs[0]).(type) { case *types.Tuple: bodyPrefix = []ast.Stmt{&ast.AssignStmt{Lhs: assign.Lhs, Rhs: []ast.Expr{fc.newIdent(selectionVar+"[1]", rhsType)}, Tok: assign.Tok}} default: @@ -614,7 +615,7 @@ func (fc *funcContext) translateBranchingStmt(caseClauses []*ast.CaseClause, def for i, clause := range caseClauses { fc.SetPos(clause.Pos()) fc.PrintCond(!flatten, fmt.Sprintf("%sif (%s) {", prefix, condStrs[i]), fmt.Sprintf("case %d:", caseOffset+i)) - fc.Indent(func() { + fc.Indented(func() { fc.translateStmtList(clause.Body) if flatten && (i < len(caseClauses)-1 || defaultClause != nil) && !astutil.EndsWithReturn(clause.Body) { fc.Printf("$s = %d; continue;", endCase) @@ -625,7 +626,7 @@ func (fc *funcContext) translateBranchingStmt(caseClauses []*ast.CaseClause, def if defaultClause != nil { fc.PrintCond(!flatten, prefix+"{", fmt.Sprintf("case %d:", caseOffset+len(caseClauses))) - fc.Indent(func() { + fc.Indented(func() { fc.translateStmtList(defaultClause.Body) }) } @@ -655,7 +656,7 @@ func (fc *funcContext) translateLoopingStmt(cond func() string, body *ast.BlockS } isTerminated := false fc.PrintCond(!flatten, "while (true) {", fmt.Sprintf("case %d:", data.beginCase)) - fc.Indent(func() { + fc.Indented(func() { condStr := cond() if condStr != "true" { fc.PrintCond(!flatten, fmt.Sprintf("if (!(%s)) { break; }", condStr), fmt.Sprintf("if(!(%s)) { $s = %d; continue; }", condStr, data.endCase)) @@ -700,11 +701,11 @@ func (fc *funcContext) translateAssign(lhs, rhs ast.Expr, define bool) string { } if l, ok := lhs.(*ast.IndexExpr); ok { - if t, ok := fc.pkgCtx.TypeOf(l.X).Underlying().(*types.Map); ok { - if typesutil.IsJsObject(fc.pkgCtx.TypeOf(l.Index)) { + if t, ok := fc.typeOf(l.X).Underlying().(*types.Map); ok { + if typesutil.IsJsObject(fc.typeOf(l.Index)) { fc.pkgCtx.errList = append(fc.pkgCtx.errList, types.Error{Fset: fc.pkgCtx.fileSet, Pos: l.Index.Pos(), Msg: "cannot use js.Object as map key"}) } - keyVar := fc.newVariable("_key") + keyVar := fc.newLocalVariable("_key") return fmt.Sprintf( `%s = %s; (%s || $throwRuntimeError("assignment to entry in nil map")).set(%s.keyFor(%s), { k: %s, v: %s });`, keyVar, @@ -718,7 +719,7 @@ func (fc *funcContext) translateAssign(lhs, rhs ast.Expr, define bool) string { } } - lhsType := fc.pkgCtx.TypeOf(lhs) + lhsType := fc.typeOf(lhs) rhsExpr := fc.translateConversion(rhs, lhsType) if _, ok := rhs.(*ast.CompositeLit); ok && define { return fmt.Sprintf("%s = %s;", fc.translateExpr(lhs), rhsExpr) // skip $copy @@ -742,7 +743,7 @@ func (fc *funcContext) translateAssign(lhs, rhs ast.Expr, define bool) string { 
case *ast.Ident: return fmt.Sprintf("%s = %s;", fc.objectName(fc.pkgCtx.ObjectOf(l)), rhsExpr) case *ast.SelectorExpr: - sel, ok := fc.pkgCtx.SelectionOf(l) + sel, ok := fc.selectionOf(l) if !ok { // qualified identifier return fmt.Sprintf("%s = %s;", fc.objectName(fc.pkgCtx.Uses[l.Sel]), rhsExpr) @@ -755,7 +756,7 @@ func (fc *funcContext) translateAssign(lhs, rhs ast.Expr, define bool) string { case *ast.StarExpr: return fmt.Sprintf("%s.$set(%s);", fc.translateExpr(l.X), rhsExpr) case *ast.IndexExpr: - switch t := fc.pkgCtx.TypeOf(l.X).Underlying().(type) { + switch t := fc.typeOf(l.X).Underlying().(type) { case *types.Array, *types.Pointer: pattern := rangeCheck("%1e[%2f] = %3s", fc.pkgCtx.Types[l.Index].Value != nil, true) if _, ok := t.(*types.Pointer); ok { // check pointer for nil (attribute getter causes a panic) @@ -773,7 +774,7 @@ func (fc *funcContext) translateAssign(lhs, rhs ast.Expr, define bool) string { } func (fc *funcContext) translateResults(results []ast.Expr) string { - tuple := fc.sig.Results() + tuple := fc.typeResolver.Substitute(fc.sig.Sig.Results()).(*types.Tuple) switch tuple.Len() { case 0: return "" @@ -787,7 +788,7 @@ func (fc *funcContext) translateResults(results []ast.Expr) string { return " " + v.String() default: if len(results) == 1 { - resultTuple := fc.pkgCtx.TypeOf(results[0]).(*types.Tuple) + resultTuple := fc.typeOf(results[0]).(*types.Tuple) if resultTuple.Len() != tuple.Len() { panic("invalid tuple return assignment") @@ -799,7 +800,7 @@ func (fc *funcContext) translateResults(results []ast.Expr) string { return " " + resultExpr } - tmpVar := fc.newVariable("_returncast") + tmpVar := fc.newLocalVariable("_returncast") fc.Printf("%s = %s;", tmpVar, resultExpr) // Not all the return types matched, map everything out for implicit casting diff --git a/compiler/typesutil/map.go b/compiler/typesutil/map.go new file mode 100644 index 000000000..146f09765 --- /dev/null +++ b/compiler/typesutil/map.go @@ -0,0 +1,34 @@ +package typesutil + +import ( + "go/types" + + "golang.org/x/tools/go/types/typeutil" +) + +// Map is a type-safe wrapper around golang.org/x/tools/go/types/typeutil.Map. +type Map[Val any] struct{ impl typeutil.Map } + +func (m *Map[Val]) At(key types.Type) Val { + val := m.impl.At(key) + if val != nil { + return val.(Val) + } + var zero Val + return zero +} + +func (m *Map[Val]) Set(key types.Type, value Val) (prev Val) { + old := m.impl.Set(key, value) + if old != nil { + return old.(Val) + } + var zero Val + return zero +} + +func (m *Map[Val]) Delete(key types.Type) bool { return m.impl.Delete(key) } + +func (m *Map[Val]) Len() int { return m.impl.Len() } + +func (m *Map[Val]) String() string { return m.impl.String() } diff --git a/compiler/typesutil/signature.go b/compiler/typesutil/signature.go new file mode 100644 index 000000000..0a79432cb --- /dev/null +++ b/compiler/typesutil/signature.go @@ -0,0 +1,67 @@ +package typesutil + +import ( + "fmt" + "go/types" +) + +// Signature is a helper that provides convenient access to function +// signature type information. +type Signature struct { + Sig *types.Signature +} + +// RequiredParams returns the number of required parameters in the function signature. +func (st Signature) RequiredParams() int { + l := st.Sig.Params().Len() + if st.Sig.Variadic() { + return l - 1 // Last parameter is a slice of variadic params. + } + return l +} + +// VariadicType returns the slice-type corresponding to the signature's variadic +// parameter, or nil of the signature is not variadic. 
With the exception of +// the special-case `append([]byte{}, "string"...)`, the returned type is +// `*types.Slice` and `.Elem()` method can be used to get the type of individual +// arguments. +func (st Signature) VariadicType() types.Type { + if !st.Sig.Variadic() { + return nil + } + return st.Sig.Params().At(st.Sig.Params().Len() - 1).Type() +} + +// Param returns the expected argument type for the i'th argument position. +// +// This function is able to return correct expected types for variadic calls +// both when ellipsis syntax (e.g. myFunc(requiredArg, optionalArgSlice...)) +// is used and when optional args are passed individually. +// +// The returned types may differ from the actual argument expression types if +// there is an implicit type conversion involved (e.g. passing a struct into a +// function that expects an interface). +func (st Signature) Param(i int, ellipsis bool) types.Type { + if i < st.RequiredParams() { + return st.Sig.Params().At(i).Type() + } + if !st.Sig.Variadic() { + // This should never happen if the code was type-checked successfully. + panic(fmt.Errorf("tried to access parameter %d of a non-variadic signature %s", i, st.Sig)) + } + if ellipsis { + return st.VariadicType() + } + return st.VariadicType().(*types.Slice).Elem() +} + +// HasResults returns true if the function signature returns something. +func (st Signature) HasResults() bool { + return st.Sig.Results().Len() > 0 +} + +// HasNamedResults returns true if the function signature returns something and +// returned results are names (e.g. `func () (val int, err error)`). +func (st Signature) HasNamedResults() bool { + return st.HasResults() && st.Sig.Results().At(0).Name() != "" +} diff --git a/compiler/typesutil/signature_test.go b/compiler/typesutil/signature_test.go new file mode 100644 index 000000000..a6d159687 --- /dev/null +++ b/compiler/typesutil/signature_test.go @@ -0,0 +1,166 @@ +package typesutil + +import ( + "go/token" + "go/types" + "testing" +) + +func TestSignature_RequiredParams(t *testing.T) { + tests := []struct { + descr string + sig *types.Signature + want int + }{{ + descr: "regular signature", + sig: types.NewSignatureType(nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "a", types.Typ[types.Int]), + types.NewVar(token.NoPos, nil, "b", types.Typ[types.String]), + types.NewVar(token.NoPos, nil, "c", types.NewSlice(types.Typ[types.String])), + ), nil, false), + want: 3, + }, { + descr: "variadic signature", + sig: types.NewSignatureType(nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "a", types.Typ[types.Int]), + types.NewVar(token.NoPos, nil, "b", types.Typ[types.String]), + types.NewVar(token.NoPos, nil, "c", types.NewSlice(types.Typ[types.String])), + ), nil, true /*variadic*/), + want: 2, + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + sig := Signature{Sig: test.sig} + got := sig.RequiredParams() + if got != test.want { + t.Errorf("Got: {%s}.RequiredParams() = %d. 
Want: %d.", test.sig, got, test.want) + } + }) + } +} + +func TestSignature_VariadicType(t *testing.T) { + tests := []struct { + descr string + sig *types.Signature + want types.Type + }{{ + descr: "regular signature", + sig: types.NewSignatureType(nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "a", types.Typ[types.Int]), + types.NewVar(token.NoPos, nil, "b", types.Typ[types.String]), + types.NewVar(token.NoPos, nil, "c", types.NewSlice(types.Typ[types.String])), + ), nil, false), + want: nil, + }, { + descr: "variadic signature", + sig: types.NewSignatureType(nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "a", types.Typ[types.Int]), + types.NewVar(token.NoPos, nil, "b", types.Typ[types.String]), + types.NewVar(token.NoPos, nil, "c", types.NewSlice(types.Typ[types.String])), + ), nil, true /*variadic*/), + want: types.NewSlice(types.Typ[types.String]), + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + sig := Signature{Sig: test.sig} + got := sig.VariadicType() + if !types.Identical(got, test.want) { + t.Errorf("Got: {%s}.VariadicType() = %v. Want: %v.", test.sig, got, test.want) + } + }) + } +} + +func TestSignature_Param(t *testing.T) { + sig := types.NewSignatureType(nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "a", types.Typ[types.Int]), + types.NewVar(token.NoPos, nil, "b", types.Typ[types.Byte]), + types.NewVar(token.NoPos, nil, "c", types.NewSlice(types.Typ[types.String])), + ), nil, true /*variadic*/) + + tests := []struct { + descr string + param int + ellipsis bool + want types.Type + }{{ + descr: "required param", + param: 1, + want: types.Typ[types.Byte], + }, { + descr: "variadic param", + param: 2, + want: types.Typ[types.String], + }, { + descr: "variadic param repeated", + param: 3, + want: types.Typ[types.String], + }, { + descr: "variadic param with ellipsis", + param: 2, + ellipsis: true, + want: types.NewSlice(types.Typ[types.String]), + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + sig := Signature{Sig: sig} + got := sig.Param(test.param, test.ellipsis) + if !types.Identical(got, test.want) { + t.Errorf("Got: {%s}.Param(%v, %v) = %v. Want: %v.", sig, test.param, test.ellipsis, got, test.want) + } + }) + } +} + +func TestSignature_HasXResults(t *testing.T) { + tests := []struct { + descr string + sig *types.Signature + hasResults bool + hasNamedResults bool + }{{ + descr: "no results", + sig: types.NewSignatureType(nil, nil, nil, nil, types.NewTuple(), false), + hasResults: false, + hasNamedResults: false, + }, { + descr: "anonymous result", + sig: types.NewSignatureType(nil, nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "", types.Typ[types.String]), + ), false), + hasResults: true, + hasNamedResults: false, + }, { + descr: "named result", + sig: types.NewSignatureType(nil, nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "s", types.Typ[types.String]), + ), false), + hasResults: true, + hasNamedResults: true, + }, { + descr: "underscore named result", + sig: types.NewSignatureType(nil, nil, nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "_", types.Typ[types.String]), + ), false), + hasResults: true, + hasNamedResults: true, + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + sig := Signature{Sig: test.sig} + gotHasResults := sig.HasResults() + if gotHasResults != test.hasResults { + t.Errorf("Got: {%s}.HasResults() = %v. 
Want: %v.", test.sig, gotHasResults, test.hasResults) + } + gotHasNamedResults := sig.HasNamedResults() + if gotHasNamedResults != test.hasNamedResults { + t.Errorf("Got: {%s}.HasResults() = %v. Want: %v.", test.sig, gotHasNamedResults, test.hasNamedResults) + } + }) + } +} diff --git a/compiler/typesutil/typelist.go b/compiler/typesutil/typelist.go new file mode 100644 index 000000000..768677365 --- /dev/null +++ b/compiler/typesutil/typelist.go @@ -0,0 +1,33 @@ +package typesutil + +import ( + "go/types" + "strings" +) + +// TypeList an ordered list of types. +type TypeList []types.Type + +func (tl TypeList) String() string { + buf := strings.Builder{} + for i, typ := range tl { + if i != 0 { + buf.WriteString(", ") + } + buf.WriteString(types.TypeString(typ, nil)) + } + return buf.String() +} + +// Equal returns true if both lists of type arguments are identical. +func (tl TypeList) Equal(other TypeList) bool { + if len(tl) != len(other) { + return false + } + for i := range tl { + if !types.Identical(tl[i], other[i]) { + return false + } + } + return true +} diff --git a/compiler/typesutil/typenames.go b/compiler/typesutil/typenames.go new file mode 100644 index 000000000..2f5ac6186 --- /dev/null +++ b/compiler/typesutil/typenames.go @@ -0,0 +1,30 @@ +package typesutil + +import "go/types" + +// TypeNames implements an ordered set of *types.TypeName pointers. +// +// The set is ordered to ensure deterministic behavior across compiler runs. +type TypeNames struct { + known map[*types.TypeName]struct{} + order []*types.TypeName +} + +// Add a type name to the set. If the type name has been previously added, +// this operation is a no-op. Two type names are considered equal iff they have +// the same memory address. +func (tn *TypeNames) Add(name *types.TypeName) { + if _, ok := tn.known[name]; ok { + return + } + if tn.known == nil { + tn.known = map[*types.TypeName]struct{}{} + } + tn.order = append(tn.order, name) + tn.known[name] = struct{}{} +} + +// Slice returns set elements in the order they were first added to the set. 
+func (tn *TypeNames) Slice() []*types.TypeName { + return tn.order +} diff --git a/compiler/typesutil/typenames_test.go b/compiler/typesutil/typenames_test.go new file mode 100644 index 000000000..1e8a4b994 --- /dev/null +++ b/compiler/typesutil/typenames_test.go @@ -0,0 +1,45 @@ +package typesutil + +import ( + "go/types" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/gopherjs/gopherjs/internal/srctesting" +) + +func typeNameOpts() cmp.Options { + return cmp.Options{ + cmp.Transformer("TypeName", func(name *types.TypeName) string { + return types.ObjectString(name, nil) + }), + } +} + +func TestTypeNames(t *testing.T) { + src := `package test + + type A int + type B int + type C int + ` + f := srctesting.New(t) + _, pkg := f.Check("pkg/test", f.Parse("test.go", src)) + A := srctesting.LookupObj(pkg, "A").(*types.TypeName) + B := srctesting.LookupObj(pkg, "B").(*types.TypeName) + C := srctesting.LookupObj(pkg, "C").(*types.TypeName) + + tn := TypeNames{} + tn.Add(A) + tn.Add(B) + tn.Add(A) + tn.Add(C) + tn.Add(B) + + got := tn.Slice() + want := []*types.TypeName{A, B, C} + + if diff := cmp.Diff(want, got, typeNameOpts()); diff != "" { + t.Errorf("tn.Slice() returned diff (-want,+got):\n%s", diff) + } +} diff --git a/compiler/typesutil/typesutil.go b/compiler/typesutil/typesutil.go index 600925b81..bce656f3b 100644 --- a/compiler/typesutil/typesutil.go +++ b/compiler/typesutil/typesutil.go @@ -1,6 +1,9 @@ package typesutil -import "go/types" +import ( + "fmt" + "go/types" +) func IsJsPackage(pkg *types.Package) bool { return pkg != nil && pkg.Path() == "github.com/gopherjs/gopherjs/js" @@ -14,3 +17,99 @@ func IsJsObject(t types.Type) bool { named, isNamed := ptr.Elem().(*types.Named) return isNamed && IsJsPackage(named.Obj().Pkg()) && named.Obj().Name() == "Object" } + +// RecvType returns a named type of a method receiver, or nil if it's not a method. +// +// For methods on a pointer receiver, the underlying named type is returned. +func RecvType(sig *types.Signature) *types.Named { + recv := sig.Recv() + if recv == nil { + return nil + } + + typ := recv.Type() + if ptrType, ok := typ.(*types.Pointer); ok { + typ = ptrType.Elem() + } + + return typ.(*types.Named) +} + +// RecvAsFirstArg takes a method signature and returns a function +// signature with receiver as the first parameter. +func RecvAsFirstArg(sig *types.Signature) *types.Signature { + params := make([]*types.Var, 0, 1+sig.Params().Len()) + params = append(params, sig.Recv()) + for i := 0; i < sig.Params().Len(); i++ { + params = append(params, sig.Params().At(i)) + } + return types.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic()) +} + +// Selection is a common interface for go/types.Selection and our custom-constructed +// method and field selections. +type Selection interface { + Kind() types.SelectionKind + Recv() types.Type + Index() []int + Obj() types.Object + Type() types.Type +} + +// NewSelection creates a new selection. 
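// Editorial illustration only (not part of this patch): a small runnable sketch
// of RecvType and RecvAsFirstArg defined above. The package and type names
// (demo, T, M) are invented for the example.
package main

import (
	"fmt"
	"go/token"
	"go/types"

	"github.com/gopherjs/gopherjs/compiler/typesutil"
)

func main() {
	pkg := types.NewPackage("example.com/demo", "demo")
	// type T struct{} with a method along the lines of: func (t T) M(x int) error
	tName := types.NewTypeName(token.NoPos, pkg, "T", nil)
	named := types.NewNamed(tName, types.NewStruct(nil, nil), nil)
	recv := types.NewVar(token.NoPos, pkg, "t", named)
	sig := types.NewSignatureType(recv, nil, nil,
		types.NewTuple(types.NewVar(token.NoPos, pkg, "x", types.Typ[types.Int])),
		types.NewTuple(types.NewVar(token.NoPos, pkg, "", types.Universe.Lookup("error").Type())),
		false)

	fmt.Println(typesutil.RecvType(sig))       // the named receiver type T
	fmt.Println(typesutil.RecvAsFirstArg(sig)) // the same signature with the receiver as first parameter
}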
+func NewSelection(kind types.SelectionKind, recv types.Type, index []int, obj types.Object, typ types.Type) Selection { + return &selectionImpl{ + kind: kind, + recv: recv, + index: index, + obj: obj, + typ: typ, + } +} + +type selectionImpl struct { + kind types.SelectionKind + recv types.Type + index []int + obj types.Object + typ types.Type +} + +func (sel *selectionImpl) Kind() types.SelectionKind { return sel.kind } +func (sel *selectionImpl) Recv() types.Type { return sel.recv } +func (sel *selectionImpl) Index() []int { return sel.index } +func (sel *selectionImpl) Obj() types.Object { return sel.obj } +func (sel *selectionImpl) Type() types.Type { return sel.typ } + +func fieldsOf(s *types.Struct) []*types.Var { + fields := make([]*types.Var, s.NumFields()) + for i := 0; i < s.NumFields(); i++ { + fields[i] = s.Field(i) + } + return fields +} + +// OffsetOf returns byte offset of a struct field specified by the provided +// selection. +// +// Adapted from go/types.Config.offsetof(). +func OffsetOf(sizes types.Sizes, sel Selection) int64 { + if sel.Kind() != types.FieldVal { + panic(fmt.Errorf("byte offsets are only defined for struct fields")) + } + typ := sel.Recv() + var o int64 + for _, idx := range sel.Index() { + s := typ.Underlying().(*types.Struct) + o += sizes.Offsetsof(fieldsOf(s))[idx] + typ = s.Field(idx).Type() + } + + return o +} + +// IsMethod returns true if the passed object is a method. +func IsMethod(o types.Object) bool { + f, ok := o.(*types.Func) + return ok && f.Type().(*types.Signature).Recv() != nil +} diff --git a/compiler/utils.go b/compiler/utils.go index 058437f67..7d286f447 100644 --- a/compiler/utils.go +++ b/compiler/utils.go @@ -18,10 +18,29 @@ import ( "text/template" "unicode" - "github.com/gopherjs/gopherjs/compiler/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/analysis" + "github.com/gopherjs/gopherjs/compiler/internal/typeparams" "github.com/gopherjs/gopherjs/compiler/typesutil" ) +// We use this character as a separator in synthetic identifiers instead of a +// regular dot. This character is safe for use in JS identifiers and helps to +// visually separate components of the name when it appears in a stack trace. +const midDot = "·" + +// root returns the topmost function context corresponding to the package scope. +func (fc *funcContext) root() *funcContext { + if fc.isRoot() { + return fc + } + return fc.parent.root() +} + +// isRoot returns true for the package-level context. +func (fc *funcContext) isRoot() bool { + return fc.parent == nil +} + func (fc *funcContext) Write(b []byte) (int, error) { fc.writePos() fc.output = append(fc.output, b...) @@ -29,7 +48,7 @@ func (fc *funcContext) Write(b []byte) (int, error) { } func (fc *funcContext) Printf(format string, values ...interface{}) { - fc.Write([]byte(strings.Repeat("\t", fc.pkgCtx.indentation))) + fc.Write([]byte(fc.Indentation(0))) fmt.Fprintf(fc, format, values...) fc.Write([]byte{'\n'}) fc.Write(fc.delayedOutput) @@ -57,12 +76,21 @@ func (fc *funcContext) writePos() { } } -func (fc *funcContext) Indent(f func()) { +// Indented increases generated code indentation level by 1 for the code emitted +// from the callback f. +func (fc *funcContext) Indented(f func()) { fc.pkgCtx.indentation++ f() fc.pkgCtx.indentation-- } +// Indentation returns a sequence of "\t" characters appropriate to the current +// generated code indentation level. The `extra` parameter provides relative +// indentation adjustment. 
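// Editorial illustration only (not part of this patch): a runnable sketch of
// NewSelection and OffsetOf defined above, computing the byte offset of a
// struct field under the standard gc/amd64 size rules.
package main

import (
	"fmt"
	"go/token"
	"go/types"

	"github.com/gopherjs/gopherjs/compiler/typesutil"
)

func main() {
	// struct { a int32; b int64 }
	fields := []*types.Var{
		types.NewField(token.NoPos, nil, "a", types.Typ[types.Int32], false),
		types.NewField(token.NoPos, nil, "b", types.Typ[types.Int64], false),
	}
	st := types.NewStruct(fields, nil)
	sizes := types.SizesFor("gc", "amd64")

	// Synthetic field selection for "b" (field index 1) on the struct.
	sel := typesutil.NewSelection(types.FieldVal, st, []int{1}, fields[1], fields[1].Type())
	fmt.Println(typesutil.OffsetOf(sizes, sel)) // 8: int64 is aligned to 8 bytes after the int32
}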
+func (fc *funcContext) Indentation(extra int) string { + return strings.Repeat("\t", fc.pkgCtx.indentation+extra) +} + func (fc *funcContext) CatchOutput(indent int, f func()) []byte { origoutput := fc.output fc.output = nil @@ -101,12 +129,12 @@ func (fc *funcContext) expandTupleArgs(argExprs []ast.Expr) []ast.Expr { return argExprs } - tuple, isTuple := fc.pkgCtx.TypeOf(argExprs[0]).(*types.Tuple) + tuple, isTuple := fc.typeOf(argExprs[0]).(*types.Tuple) if !isTuple { return argExprs } - tupleVar := fc.newVariable("_tuple") + tupleVar := fc.newLocalVariable("_tuple") fc.Printf("%s = %s;", tupleVar, fc.translateExpr(argExprs[0])) argExprs = make([]ast.Expr, tuple.Len()) for i := range argExprs { @@ -118,7 +146,7 @@ func (fc *funcContext) expandTupleArgs(argExprs []ast.Expr) []ast.Expr { func (fc *funcContext) translateArgs(sig *types.Signature, argExprs []ast.Expr, ellipsis bool) []string { argExprs = fc.expandTupleArgs(argExprs) - sigTypes := signatureTypes{Sig: sig} + sigTypes := typesutil.Signature{Sig: sig} if sig.Variadic() && len(argExprs) == 0 { return []string{fmt.Sprintf("%s.nil", fc.typeName(sigTypes.VariadicType()))} @@ -134,7 +162,7 @@ func (fc *funcContext) translateArgs(sig *types.Signature, argExprs []ast.Expr, arg := fc.translateImplicitConversionWithCloning(argExpr, sigTypes.Param(i, ellipsis)).String() if preserveOrder && fc.pkgCtx.Types[argExpr].Value == nil { - argVar := fc.newVariable("_arg") + argVar := fc.newLocalVariable("_arg") fc.Printf("%s = %s;", argVar, arg) arg = argVar } @@ -158,7 +186,7 @@ func (fc *funcContext) translateArgs(sig *types.Signature, argExprs []ast.Expr, return args } -func (fc *funcContext) translateSelection(sel selection, pos token.Pos) ([]string, string) { +func (fc *funcContext) translateSelection(sel typesutil.Selection, pos token.Pos) ([]string, string) { var fields []string t := sel.Recv() for _, index := range sel.Index() { @@ -170,7 +198,7 @@ func (fc *funcContext) translateSelection(sel selection, pos token.Pos) ([]strin jsFieldName := s.Field(index).Name() for { fields = append(fields, fieldName(s, 0)) - ft := s.Field(0).Type() + ft := fc.fieldType(s, 0) if typesutil.IsJsObject(ft) { return fields, jsTag } @@ -187,7 +215,7 @@ func (fc *funcContext) translateSelection(sel selection, pos token.Pos) ([]strin } } fields = append(fields, fieldName(s, index)) - t = s.Field(index).Type() + t = fc.fieldType(s, index) } return fields, "" } @@ -229,11 +257,26 @@ func (fc *funcContext) newConst(t types.Type, value constant.Value) ast.Expr { return id } -func (fc *funcContext) newVariable(name string) string { - return fc.newVariableWithLevel(name, false) +// newLocalVariable assigns a new JavaScript variable name for the given Go +// local variable name. In this context "local" means "in scope of the current" +// functionContext. +func (fc *funcContext) newLocalVariable(name string) string { + return fc.newVariable(name, false) } -func (fc *funcContext) newVariableWithLevel(name string, pkgLevel bool) string { +// newVariable assigns a new JavaScript variable name for the given Go variable +// or type. +// +// If there is already a variable with the same name visible in the current +// function context (e.g. due to shadowing), the returned name will be suffixed +// with a number to prevent conflict. This is necessary because Go name +// resolution scopes differ from var declarations in JS. 
+// +// If pkgLevel is true, the variable is declared at the package level and added +// to this functionContext, as well as all parents, but not to the list of local +// variables. If false, it is added to this context only, as well as the list of +// local vars. +func (fc *funcContext) newVariable(name string, pkgLevel bool) string { if name == "" { panic("newVariable: empty name") } @@ -278,12 +321,20 @@ func (fc *funcContext) newVariableWithLevel(name string, pkgLevel bool) string { return varName } +// newIdent declares a new Go variable with the given name and type and returns +// an *ast.Ident referring to that object. func (fc *funcContext) newIdent(name string, t types.Type) *ast.Ident { - ident := ast.NewIdent(name) - fc.setType(ident, t) obj := types.NewVar(0, fc.pkgCtx.Pkg, name, t) + fc.objectNames[obj] = name + return fc.newIdentFor(obj) +} + +// newIdentFor creates a new *ast.Ident referring to the given Go object. +func (fc *funcContext) newIdentFor(obj types.Object) *ast.Ident { + ident := ast.NewIdent(obj.Name()) + ident.NamePos = obj.Pos() fc.pkgCtx.Uses[ident] = obj - fc.pkgCtx.objectNames[obj] = name + fc.setType(ident, obj.Type()) return ident } @@ -293,6 +344,25 @@ func (fc *funcContext) newTypeIdent(name string, obj types.Object) *ast.Ident { return ident } +// newLitFuncName generates a new synthetic name for a function literal. +func (fc *funcContext) newLitFuncName() string { + fc.funcLitCounter++ + name := &strings.Builder{} + + // If function literal is defined inside another function, qualify its + // synthetic name with the outer function to make it easier to identify. + if fc.instance.Object != nil { + if recvType := typesutil.RecvType(fc.sig.Sig); recvType != nil { + name.WriteString(recvType.Obj().Name()) + name.WriteString(midDot) + } + name.WriteString(fc.instance.Object.Name()) + name.WriteString(midDot) + } + fmt.Fprintf(name, "func%d", fc.funcLitCounter) + return name.String() +} + func (fc *funcContext) setType(e ast.Expr, t types.Type) ast.Expr { fc.pkgCtx.Types[e] = types.TypeAndValue{Type: t} return e @@ -319,22 +389,48 @@ func isVarOrConst(o types.Object) bool { } func isPkgLevel(o types.Object) bool { - return o.Parent() != nil && o.Parent().Parent() == types.Universe + // Note: named types are always assigned a variable at package level to be + // initialized with the rest of the package types, even the types declared + // in a statement inside a function. + _, isType := o.(*types.TypeName) + return (o.Parent() != nil && o.Parent().Parent() == types.Universe) || isType +} + +// assignedObjectName checks if the object has been previously assigned a name +// in this or one of the parent contexts. If not, found will be false. +func (fc *funcContext) assignedObjectName(o types.Object) (name string, found bool) { + if fc == nil { + return "", false + } + if name, found := fc.parent.assignedObjectName(o); found { + return name, true + } + + name, found = fc.objectNames[o] + return name, found } +// objectName returns a JS expression that refers to the given object. If the +// object hasn't been previously assigned a JS variable name, it will be +// allocated as needed. func (fc *funcContext) objectName(o types.Object) string { if isPkgLevel(o) { - fc.pkgCtx.dependencies[o] = true + fc.pkgCtx.DeclareDCEDep(o) if o.Pkg() != fc.pkgCtx.Pkg || (isVarOrConst(o) && o.Exported()) { return fc.pkgVar(o.Pkg()) + "." 
+ o.Name() } } - name, ok := fc.pkgCtx.objectNames[o] + name, ok := fc.assignedObjectName(o) if !ok { - name = fc.newVariableWithLevel(o.Name(), isPkgLevel(o)) - fc.pkgCtx.objectNames[o] = name + pkgLevel := isPkgLevel(o) + name = fc.newVariable(o.Name(), pkgLevel) + if pkgLevel { + fc.root().objectNames[o] = name + } else { + fc.objectNames[o] = name + } } if v, ok := o.(*types.Var); ok && fc.pkgCtx.escapingVars[v] { @@ -343,6 +439,48 @@ func (fc *funcContext) objectName(o types.Object) string { return name } +// knownInstances returns a list of known instantiations of the object. +// +// For objects without type params and not nested in a generic function or +// method, this always returns a single trivial instance. +// If the object is generic, or in a generic function or method, but there are +// no instances, then the object is unused and an empty list is returned. +func (fc *funcContext) knownInstances(o types.Object) []typeparams.Instance { + instances := fc.pkgCtx.instanceSet.Pkg(o.Pkg()).ForObj(o) + if len(instances) == 0 && !typeparams.HasTypeParams(o.Type()) { + return []typeparams.Instance{{Object: o}} + } + return instances +} + +// instName returns a JS expression that refers to the provided instance of a +// function or type. Non-generic objects may be represented as an instance with +// zero type arguments. +func (fc *funcContext) instName(inst typeparams.Instance) string { + objName := fc.objectName(inst.Object) + if inst.IsTrivial() { + return objName + } + fc.pkgCtx.DeclareDCEDep(inst.Object, inst.TArgs...) + label := inst.TypeParamsString(` /* `, ` */`) + return fmt.Sprintf("%s[%d%s]", objName, fc.pkgCtx.instanceSet.ID(inst), label) +} + +// methodName returns a JS identifier (specifically, object property name) +// corresponding to the given method. +func (fc *funcContext) methodName(fun *types.Func) string { + if fun.Type().(*types.Signature).Recv() == nil { + panic(fmt.Errorf("expected a method, got a standalone function %v", fun)) + } + name := fun.Name() + // Method names are scoped to their receiver type and guaranteed to be + // unique within that, so we only need to make sure it's not a reserved keyword + if reservedKeywords[name] { + name += "$" + } + return name +} + func (fc *funcContext) varPtrName(o *types.Var) string { if isPkgLevel(o) && o.Exported() { return fc.pkgVar(o.Pkg()) + "." + o.Name() + "$ptr" @@ -350,12 +488,17 @@ func (fc *funcContext) varPtrName(o *types.Var) string { name, ok := fc.pkgCtx.varPtrNames[o] if !ok { - name = fc.newVariableWithLevel(o.Name()+"$ptr", isPkgLevel(o)) + name = fc.newVariable(o.Name()+"$ptr", isPkgLevel(o)) fc.pkgCtx.varPtrNames[o] = name } return name } +// typeName returns a JS identifier name for the given Go type. +// +// For the built-in types it returns identifiers declared in the prelude. For +// all user-defined or composite types it creates a unique JS identifier and +// will return it on all subsequent calls for the type. func (fc *funcContext) typeName(ty types.Type) string { switch t := ty.(type) { case *types.Basic: @@ -364,25 +507,111 @@ func (fc *funcContext) typeName(ty types.Type) string { if t.Obj().Name() == "error" { return "$error" } - return fc.objectName(t.Obj()) + inst := typeparams.Instance{Object: t.Obj()} + + // Get type arguments for the type if there are any. + for i := 0; i < t.TypeArgs().Len(); i++ { + inst.TArgs = append(inst.TArgs, t.TypeArgs().At(i)) + } + + // Get the nesting type arguments if there are any. 
+ if fn := typeparams.FindNestingFunc(t.Obj()); fn != nil { + if fn.Scope().Contains(t.Obj().Pos()) { + tp := typeparams.SignatureTypeParams(fn.Type().(*types.Signature)) + tNest := make([]types.Type, tp.Len()) + for i := 0; i < tp.Len(); i++ { + tNest[i] = fc.typeResolver.Substitute(tp.At(i)) + } + inst.TNest = typesutil.TypeList(tNest) + } + } + + return fc.instName(inst) case *types.Interface: if t.Empty() { return "$emptyInterface" } + case *types.TypeParam: + panic(fmt.Errorf("unexpected type parameter: %v", t)) } + // For anonymous composite types, generate a synthetic package-level type + // declaration, which will be reused for all instances of this type. This + // improves performance, since runtime won't have to synthesize the same type + // repeatedly. anonType, ok := fc.pkgCtx.anonTypeMap.At(ty).(*types.TypeName) if !ok { fc.initArgs(ty) // cause all embedded types to be registered - varName := fc.newVariableWithLevel(strings.ToLower(typeKind(ty)[5:])+"Type", true) + varName := fc.newVariable(strings.ToLower(typeKind(ty)[5:])+"Type", true) anonType = types.NewTypeName(token.NoPos, fc.pkgCtx.Pkg, varName, ty) // fake types.TypeName fc.pkgCtx.anonTypes = append(fc.pkgCtx.anonTypes, anonType) fc.pkgCtx.anonTypeMap.Set(ty, anonType) } - fc.pkgCtx.dependencies[anonType] = true + fc.pkgCtx.DeclareDCEDep(anonType) return anonType.Name() } +// importedPkgVar returns a package-level variable name for accessing an imported +// package. +// +// Allocates a new variable if this is the first call, or returns the existing +// one. The variable is based on the package name (implicitly derived from the +// `package` declaration in the imported package, or explicitly assigned by the +// import decl in the importing source file). +// +// Returns the allocated variable name. +func (fc *funcContext) importedPkgVar(pkg *types.Package) string { + if pkgVar, ok := fc.pkgCtx.pkgVars[pkg.Path()]; ok { + return pkgVar // Already registered. + } + + pkgVar := fc.newVariable(pkg.Name(), true) + fc.pkgCtx.pkgVars[pkg.Path()] = pkgVar + return pkgVar +} + +// instanceOf constructs an instance description of the object the ident is +// referring to. For non-generic objects, it will return a trivial instance with +// no type arguments. +func (fc *funcContext) instanceOf(ident *ast.Ident) typeparams.Instance { + inst := typeparams.Instance{Object: fc.pkgCtx.ObjectOf(ident)} + if i, ok := fc.pkgCtx.Instances[ident]; ok { + inst.TArgs = fc.typeResolver.SubstituteAll(i.TypeArgs) + } + return inst +} + +// typeOf returns a type associated with the given AST expression. For types +// defined in terms of type parameters, it will substitute type parameters with +// concrete types from the current set of type arguments. +func (fc *funcContext) typeOf(expr ast.Expr) types.Type { + typ := fc.pkgCtx.TypeOf(expr) + // If the expression is referring to an instance of a generic type or function, + // we want the instantiated type. + if ident, ok := expr.(*ast.Ident); ok { + if inst, ok := fc.pkgCtx.Instances[ident]; ok { + typ = inst.Type + } + } + return fc.typeResolver.Substitute(typ) +} + +// fieldType returns the type of the i-th field of the given struct +// after substituting type parameters with concrete types for nested context. 
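+//
+// For example (illustrative): for a generic struct such as
+// `type pair[T any] struct{ a, b T }` compiled with T = int,
+// t.Field(i).Type() may still report the type parameter T, while
+// fieldType resolves it to int through the current type resolver.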
+func (fc *funcContext) fieldType(t *types.Struct, i int) types.Type { + return fc.typeResolver.Substitute(t.Field(i).Type()) +} + +func (fc *funcContext) selectionOf(e *ast.SelectorExpr) (typesutil.Selection, bool) { + if sel, ok := fc.pkgCtx.Selections[e]; ok { + return fc.typeResolver.SubstituteSelection(sel), true + } + if sel, ok := fc.pkgCtx.additionalSelections[e]; ok { + return sel, true + } + return nil, false +} + func (fc *funcContext) externalize(s string, t types.Type) string { if typesutil.IsJsObject(t) { return s @@ -695,7 +924,15 @@ func rangeCheck(pattern string, constantIndex, array bool) string { } func encodeIdent(name string) string { - return strings.Replace(url.QueryEscape(name), "%", "$", -1) + // Quick-and-dirty way to make any string safe for use as an identifier in JS. + name = url.QueryEscape(name) + // We use unicode middle dot as a visual separator in synthetic identifiers. + // It is safe for use in a JS identifier, so we un-encode it for readability. + name = strings.ReplaceAll(name, "%C2%B7", midDot) + // QueryEscape uses '%' before hex-codes of escaped characters, which is not + // allowed in a JS identifier, use '$' instead. + name = strings.ReplaceAll(name, "%", "$") + return name } // formatJSStructTagVal returns JavaScript code for accessing an object's property @@ -723,61 +960,6 @@ func formatJSStructTagVal(jsTag string) string { return "." + jsTag } -// signatureTypes is a helper that provides convenient access to function -// signature type information. -type signatureTypes struct { - Sig *types.Signature -} - -// RequiredParams returns the number of required parameters in the function signature. -func (st signatureTypes) RequiredParams() int { - l := st.Sig.Params().Len() - if st.Sig.Variadic() { - return l - 1 // Last parameter is a slice of variadic params. - } - return l -} - -// VariadicType returns the slice-type corresponding to the signature's variadic -// parameter, or nil of the signature is not variadic. With the exception of -// the special-case `append([]byte{}, "string"...)`, the returned type is -// `*types.Slice` and `.Elem()` method can be used to get the type of individual -// arguments. -func (st signatureTypes) VariadicType() types.Type { - if !st.Sig.Variadic() { - return nil - } - return st.Sig.Params().At(st.Sig.Params().Len() - 1).Type() -} - -// Returns the expected argument type for the i'th argument position. -// -// This function is able to return correct expected types for variadic calls -// both when ellipsis syntax (e.g. myFunc(requiredArg, optionalArgSlice...)) -// is used and when optional args are passed individually. -// -// The returned types may differ from the actual argument expression types if -// there is an implicit type conversion involved (e.g. passing a struct into a -// function that expects an interface). -func (st signatureTypes) Param(i int, ellipsis bool) types.Type { - if i < st.RequiredParams() { - return st.Sig.Params().At(i).Type() - } - if !st.Sig.Variadic() { - // This should never happen if the code was type-checked successfully. - panic(fmt.Errorf("tried to access parameter %d of a non-variadic signature %s", i, st.Sig)) - } - if ellipsis { - return st.VariadicType() - } - return st.VariadicType().(*types.Slice).Elem() -} - -// ErrorAt annotates an error with a position in the source code. 
-func ErrorAt(err error, fset *token.FileSet, pos token.Pos) error { - return fmt.Errorf("%s: %w", fset.Position(pos), err) -} - // FatalError is an error compiler panics with when it encountered a fatal error. // // FatalError implements io.Writer, which can be used to record any free-form @@ -831,3 +1013,13 @@ func bailingOut(err interface{}) (*FatalError, bool) { fe, ok := err.(*FatalError) return fe, ok } + +func removeMatching[T comparable](haystack []T, needle T) []T { + var result []T + for _, el := range haystack { + if el != needle { + result = append(result, el) + } + } + return result +} diff --git a/doc/packages.md b/doc/packages.md index b49db7319..ca1ff80e5 100644 --- a/doc/packages.md +++ b/doc/packages.md @@ -1,8 +1,8 @@ # Supported Packages -On each commit, Circle CI automatically compiles all supported packages with GopherJS and runs their tests: +On each commit, Github Actions CI automatically compiles all supported packages with GopherJS and runs their tests: -[![Circle CI](https://circleci.com/gh/gopherjs/gopherjs.svg?style=svg)](https://circleci.com/gh/gopherjs/gopherjs) +[![Github Actions CI](https://github.com/gopherjs/gopherjs/actions/workflows/ci.yaml/badge.svg)](https://github.com/gopherjs/gopherjs/actions/workflows/ci.yaml) | Name | Supported | Comment | | ------------------- | ------------ | --------------------------------------------------------------------------------- | diff --git a/go.mod b/go.mod index 8edafd89b..faa94f070 100644 --- a/go.mod +++ b/go.mod @@ -3,23 +3,23 @@ module github.com/gopherjs/gopherjs go 1.18 require ( - github.com/evanw/esbuild v0.18.0 + github.com/evanw/esbuild v0.25.4 github.com/fsnotify/fsnotify v1.5.1 - github.com/google/go-cmp v0.5.7 + github.com/google/go-cmp v0.5.8 + github.com/msvitok77/goembed v0.3.5 github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86 github.com/neelance/sourcemap v0.0.0-20200213170602-2833bce08e4c github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636 github.com/sirupsen/logrus v1.8.1 - github.com/spf13/cobra v1.2.1 - github.com/spf13/pflag v1.0.5 - github.com/visualfc/goembed v0.3.3 - golang.org/x/sync v0.3.0 + github.com/spf13/cobra v1.9.1 + github.com/spf13/pflag v1.0.6 + golang.org/x/sync v0.5.0 golang.org/x/sys v0.10.0 golang.org/x/term v0.0.0-20220411215600-e5f449aeb171 - golang.org/x/tools v0.11.0 + golang.org/x/tools v0.16.0 ) require ( - github.com/inconshreveable/mousetrap v1.0.0 // indirect - golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + golang.org/x/mod v0.14.0 // indirect ) diff --git a/go.sum b/go.sum index 349d599ba..29dc8900b 100644 --- a/go.sum +++ b/go.sum @@ -1,600 +1,45 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod 
h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod 
h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/evanw/esbuild v0.18.0 h1:zJrquhC5ZiricRVQxMQTWqO8zYcV7F7OfUXstB9Ucbg= -github.com/evanw/esbuild v0.18.0/go.mod h1:iINY06rn799hi48UqEnaQvVfZWe6W9bET78LbvN8VWk= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/evanw/esbuild v0.25.4 h1:k1bTSim+usBG27w7BfOCorhgx3tO+6bAfMj5pR+6SKg= +github.com/evanw/esbuild v0.25.4/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= 
-github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-rootcerts 
v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod 
h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/msvitok77/goembed v0.3.5 h1:SNdkLLipv4YGNVWCVCn+/N01aSp7Ga6/YOcB+kYxnhk= +github.com/msvitok77/goembed v0.3.5/go.mod h1:ycBNmh+53HrsZPQfWOJHYXbu7vLwb1QYdJISOyKlnnc= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86 h1:D6paGObi5Wud7xg83MaEFyjxQB1W5bz5d0IFppr+ymk= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20200213170602-2833bce08e4c h1:bY6ktFuJkt+ZXkX0RChQch2FtHpWQLVS8Qo1YasiIVk= github.com/neelance/sourcemap v0.0.0-20200213170602-2833bce08e4c/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636 h1:aSISeOcal5irEhJd1M+IrApc0PdcN7e7Aj4yuEnOrfQ= github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4/go.mod 
h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw= -github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= +github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/visualfc/goembed v0.3.3 h1:pOL02L715tHKsLQVMcZz06tTzRDAHkJKJLRnCA22G9Q= -github.com/visualfc/goembed v0.3.3/go.mod h1:jCVCz/yTJGyslo6Hta+pYxWWBuq9ADCcIVZBTQ0/iVI= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/multierr v1.6.0/go.mod 
h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile 
v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
-golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= +golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20220411215600-e5f449aeb171 h1:EH1Deb8WZJ0xc0WK//leUHXcX9aLE5SymusoTmMZye8= golang.org/x/term v0.0.0-20220411215600-e5f449aeb171/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools 
v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.11.0 h1:EMCa6U9S2LtZXLAMoWiR/R8dAQFRqbAitmbJ2UKhoi8= -golang.org/x/tools v0.11.0/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= -golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api 
v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod 
h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= 
-google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +golang.org/x/tools v0.16.0 h1:GO788SKMRunPIBCXiQyo2AaexLstOrVhuAL5YwsckQM= +golang.org/x/tools v0.16.0/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 
v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/errorList/errorList.go b/internal/errorList/errorList.go new file mode 100644 index 000000000..531a0f4e0 --- /dev/null +++ b/internal/errorList/errorList.go @@ -0,0 +1,68 @@ +package errorList + +import ( + "errors" + "fmt" +) + +// ErrTooManyErrors is added to the ErrorList by the Trim method. +var ErrTooManyErrors = errors.New("too many errors") + +// ErrorList wraps multiple errors as a single error. +type ErrorList []error + +func (errs ErrorList) Error() string { + if len(errs) == 0 { + return "" + } + return fmt.Sprintf("%s (and %d more errors)", errs[0].Error(), len(errs[1:])) +} + +// ErrOrNil returns nil if ErrorList is empty, or the error otherwise. +func (errs ErrorList) ErrOrNil() error { + if len(errs) == 0 { + return nil + } + return errs +} + +// Append an error to the list. +// +// If err is an instance of ErrorList, the lists are concatenated together, +// otherwise err is appended at the end of the list. If err is nil, the list is +// returned unmodified. +// +// err := DoStuff() +// errList := errList.Append(err) +func (errs ErrorList) Append(err error) ErrorList { + if err == nil { + return errs + } + if err, ok := err.(ErrorList); ok { + return append(errs, err...) + } + return append(errs, err) +} + +// AppendDistinct is similar to Append, but doesn't append the error if it has +// the same message as the last error on the list. +func (errs ErrorList) AppendDistinct(err error) ErrorList { + if l := len(errs); l > 0 { + if prev := errs[l-1]; prev != nil && err.Error() == prev.Error() { + return errs // The new error is the same as the last one, skip it. + } + } + + return errs.Append(err) +} + +// Trim the error list if it has more than limit errors. If the list is trimmed, +// all extraneous errors are replaced with a single ErrTooManyErrors, making the +// returned ErrorList length of limit+1. +func (errs ErrorList) Trim(limit int) ErrorList { + if len(errs) <= limit { + return errs + } + + return append(errs[:limit], ErrTooManyErrors) +} diff --git a/internal/experiments/experiments.go b/internal/experiments/experiments.go new file mode 100644 index 000000000..85abce562 --- /dev/null +++ b/internal/experiments/experiments.go @@ -0,0 +1,122 @@ +// Package experiments managed the list of experimental feature flags supported +// by GopherJS. 
+// +// The GOPHERJS_EXPERIMENT environment variable can be used to control which features +// are enabled. +package experiments + +import ( + "errors" + "fmt" + "os" + "reflect" + "strconv" + "strings" +) + +var ( + // ErrInvalidDest is a kind of error returned by parseFlags() when the dest + // argument does not meet the requirements. + ErrInvalidDest = errors.New("invalid flag struct") + // ErrInvalidFormat is a kind of error returned by parseFlags() when the raw + // flag string format is not valid. + ErrInvalidFormat = errors.New("invalid flag string format") +) + +// Env contains experiment flag values from the GOPHERJS_EXPERIMENT +// environment variable. +var Env Flags + +func init() { + if err := parseFlags(os.Getenv("GOPHERJS_EXPERIMENT"), &Env); err != nil { + panic(fmt.Errorf("failed to parse GOPHERJS_EXPERIMENT flags: %w", err)) + } +} + +// Flags contains flags for currently supported experiments. +type Flags struct { + Generics bool `flag:"generics"` +} + +// parseFlags parses the `raw` flags string and populates flag values in +// `dest`. +// +// `raw` is a comma-separated experiment flag list: `<flag1>,<flag2>,...`. Each +// flag may be either `<name>` or `<name>=<value>`. Omitting the value is equivalent +// to "<name> = true". Spaces around name and value are trimmed during +// parsing. Flag name can't be empty. If the same flag is specified multiple +// times, the last instance takes effect. +// +// `dest` must be a pointer to a struct whose fields will be populated with +// flag values. Mapping between flag names and fields is established with the +// `flag` field tag. Fields without a flag tag will be left unpopulated. +// If multiple fields are associated with the same flag, the result is unspecified. +// +// Flags that don't have a corresponding field are silently ignored. This is +// done to avoid fatal errors when an experiment flag is removed from the code, but +// remains specified in the user's environment. +// +// Currently only boolean flag values are supported, as defined by +// `strconv.ParseBool()`. +func parseFlags(raw string, dest any) error { + ptr := reflect.ValueOf(dest) + if ptr.Type().Kind() != reflect.Pointer || ptr.Type().Elem().Kind() != reflect.Struct { + return fmt.Errorf("%w: must be a pointer to a struct", ErrInvalidDest) + } + if ptr.IsNil() { + return fmt.Errorf("%w: must not be nil", ErrInvalidDest) + } + fields := fieldMap(ptr.Elem()) + + if raw == "" { + return nil + } + entries := strings.Split(raw, ",") + + for _, entry := range entries { + entry = strings.TrimSpace(entry) + var key, val string + if idx := strings.IndexRune(entry, '='); idx != -1 { + key = strings.TrimSpace(entry[0:idx]) + val = strings.TrimSpace(entry[idx+1:]) + } else { + key = entry + val = "true" + } + + if key == "" { + return fmt.Errorf("%w: empty flag name", ErrInvalidFormat) + } + + field, ok := fields[key] + if !ok { + // Unknown flag name, possibly an obsolete experiment, ignore it. + continue + } + if field.Type().Kind() != reflect.Bool { + return fmt.Errorf("%w: only boolean flags are supported", ErrInvalidDest) + } + b, err := strconv.ParseBool(val) + if err != nil { + return fmt.Errorf("%w: can't parse %q as boolean for flag %q", ErrInvalidFormat, val, key) + } + field.SetBool(b) + } + + return nil +} + +// fieldMap returns a map of struct fields keyed by the value of the "flag" tag. +// +// `s` must be a struct. Fields without a "flag" tag are ignored. If multiple +// fields have the same flag, the last field wins.
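// Illustrative sketch (editor's addition, not part of the patch itself): how a
// tool inside the gopherjs module might consume the experiment flags defined
// above. Env is populated once, during package init, from GOPHERJS_EXPERIMENT;
// per the parseFlags contract, a bare "generics" entry is equivalent to
// "generics=true".
package main

import (
	"fmt"

	"github.com/gopherjs/gopherjs/internal/experiments"
)

func main() {
	// With GOPHERJS_EXPERIMENT="generics" set in the environment when this
	// program starts, the generics-specific code paths can be switched on.
	if experiments.Env.Generics {
		fmt.Println("generics experiment is enabled")
	}
}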
+func fieldMap(s reflect.Value) map[string]reflect.Value { + typ := s.Type() + result := map[string]reflect.Value{} + for i := 0; i < typ.NumField(); i++ { + if val, ok := typ.Field(i).Tag.Lookup("flag"); ok { + result[val] = s.Field(i) + } + } + return result +} diff --git a/internal/experiments/experiments_test.go b/internal/experiments/experiments_test.go new file mode 100644 index 000000000..e1c3e6b38 --- /dev/null +++ b/internal/experiments/experiments_test.go @@ -0,0 +1,132 @@ +package experiments + +import ( + "errors" + "testing" + + "github.com/google/go-cmp/cmp" +) + +func TestParseFlags(t *testing.T) { + type testFlags struct { + Exp1 bool `flag:"exp1"` + Exp2 bool `flag:"exp2"` + Untagged bool + } + + tests := []struct { + descr string + raw string + want testFlags + wantErr error + }{{ + descr: "default values", + raw: "", + want: testFlags{ + Exp1: false, + Exp2: false, + }, + }, { + descr: "true flag", + raw: "exp1=true", + want: testFlags{ + Exp1: true, + Exp2: false, + }, + }, { + descr: "false flag", + raw: "exp1=false", + want: testFlags{ + Exp1: false, + Exp2: false, + }, + }, { + descr: "implicit value", + raw: "exp1", + want: testFlags{ + Exp1: true, + Exp2: false, + }, + }, { + descr: "multiple flags", + raw: "exp1=true,exp2=true", + want: testFlags{ + Exp1: true, + Exp2: true, + }, + }, { + descr: "repeated flag", + raw: "exp1=false,exp1=true", + want: testFlags{ + Exp1: true, + Exp2: false, + }, + }, { + descr: "spaces", + raw: " exp1 = true, exp2=true ", + want: testFlags{ + Exp1: true, + Exp2: true, + }, + }, { + descr: "unknown flags", + raw: "Exp1=true,Untagged,Foo=true", + want: testFlags{ + Exp1: false, + Exp2: false, + Untagged: false, + }, + }, { + descr: "empty flag name", + raw: "=true", + wantErr: ErrInvalidFormat, + }, { + descr: "invalid flag value", + raw: "exp1=foo", + wantErr: ErrInvalidFormat, + }} + + for _, test := range tests { + t.Run(test.descr, func(t *testing.T) { + got := testFlags{} + err := parseFlags(test.raw, &got) + if test.wantErr != nil { + if !errors.Is(err, test.wantErr) { + t.Errorf("Got: parseFlags(%q) returned error: %v. Want: %v.", test.raw, err, test.wantErr) + } + } else { + if err != nil { + t.Fatalf("Got: parseFlags(%q) returned error: %v. Want: no error.", test.raw, err) + } + if diff := cmp.Diff(test.want, got); diff != "" { + t.Fatalf("parseFlags(%q) returned diff (-want,+got):\n%s", test.raw, diff) + } + } + }) + } + + t.Run("invalid dest type", func(t *testing.T) { + var dest string + err := parseFlags("", &dest) + if !errors.Is(err, ErrInvalidDest) { + t.Fatalf("Got: parseFlags() returned error: %v. Want: %v.", err, ErrInvalidDest) + } + }) + + t.Run("nil dest", func(t *testing.T) { + err := parseFlags("", (*struct{})(nil)) + if !errors.Is(err, ErrInvalidDest) { + t.Fatalf("Got: parseFlags() returned error: %v. Want: %v.", err, ErrInvalidDest) + } + }) + + t.Run("unsupported flag type", func(t *testing.T) { + var dest struct { + Foo string `flag:"foo"` + } + err := parseFlags("foo", &dest) + if !errors.Is(err, ErrInvalidDest) { + t.Fatalf("Got: parseFlags() returned error: %v. Want: %v.", err, ErrInvalidDest) + } + }) +} diff --git a/internal/govendor/subst/export.go b/internal/govendor/subst/export.go new file mode 100644 index 000000000..00a77ca49 --- /dev/null +++ b/internal/govendor/subst/export.go @@ -0,0 +1,50 @@ +// Package subst is an excerpt from x/tools/go/ssa responsible for performing +// type substitution in types defined in terms of type parameters with provided +// type arguments. 
+package subst + +import ( + "fmt" + "go/types" +) + +// To simplify future updates of the borrowed code, we minimize modifications +// to it as much as possible. This file implements an exported interface to the +// original code for us to use. + +// Subster performs type parameter substitution. +type Subster struct { + impl *subster +} + +// New creates a new Subster with a given list of type parameters and matching args. +func New(tc *types.Context, tParams *types.TypeParamList, tArgs []types.Type) *Subster { + if tParams.Len() != len(tArgs) { + panic(fmt.Errorf("number of type parameters and arguments must match: %d => %d", tParams.Len(), len(tArgs))) + } + + if tParams.Len() == 0 && len(tArgs) == 0 { + return nil + } + + subst := makeSubster(tc, nil, tParams, tArgs, false) + return &Subster{impl: subst} +} + +// Type returns a version of typ with all references to type parameters +// replaced with the corresponding type arguments. +func (s *Subster) Type(typ types.Type) types.Type { + if s == nil { + return typ + } + return s.impl.typ(typ) +} + +// Types returns a version of ts with all references to type parameters +// replaced with the corresponding type arguments. +func (s *Subster) Types(ts []types.Type) []types.Type { + if s == nil { + return ts + } + return s.impl.types(ts) +} diff --git a/internal/govendor/subst/subst.go b/internal/govendor/subst/subst.go new file mode 100644 index 000000000..825e3c7f1 --- /dev/null +++ b/internal/govendor/subst/subst.go @@ -0,0 +1,480 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copy of https://cs.opensource.google/go/x/tools/+/refs/tags/v0.17.0:go/ssa/subst.go +// Any changes to this copy are labelled with GOPHERJS. +package subst + +import ( + "go/types" +) + +// Type substituter for a fixed set of replacement types. +// +// A nil *subster is an valid, empty substitution map. It always acts as +// the identity function. This allows for treating parameterized and +// non-parameterized functions identically while compiling to ssa. +// +// Not concurrency-safe. +type subster struct { + replacements map[*types.TypeParam]types.Type // values should contain no type params + cache map[types.Type]types.Type // cache of subst results + ctxt *types.Context // cache for instantiation + scope *types.Scope // *types.Named declared within this scope can be substituted (optional) + debug bool // perform extra debugging checks + // TODO(taking): consider adding Pos + // TODO(zpavlinovic): replacements can contain type params + // when generating instances inside of a generic function body. +} + +// Returns a subster that replaces tparams[i] with targs[i]. Uses ctxt as a cache. +// targs should not contain any types in tparams. +// scope is the (optional) lexical block of the generic function for which we are substituting. +func makeSubster(ctxt *types.Context, scope *types.Scope, tparams *types.TypeParamList, targs []types.Type, debug bool) *subster { + assert(tparams.Len() == len(targs), "makeSubster argument count must match") + + subst := &subster{ + replacements: make(map[*types.TypeParam]types.Type, tparams.Len()), + cache: make(map[types.Type]types.Type), + ctxt: ctxt, + scope: scope, + debug: debug, + } + for i := 0; i < tparams.Len(); i++ { + subst.replacements[tparams.At(i)] = targs[i] + } + if subst.debug { + subst.wellFormed() + } + return subst +} + +// wellFormed asserts that subst was properly initialized. 
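// Illustrative sketch (editor's addition, not part of the patch itself): using
// the exported Subster API from export.go above to substitute a type argument
// into a generic type. The source text and the Pair identifier are invented for
// illustration, and the snippet must be built inside the gopherjs module
// because the package is internal.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"github.com/gopherjs/gopherjs/internal/govendor/subst"
)

func main() {
	const src = `package p

type Pair[T any] struct{ First, Second T }
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	pair := pkg.Scope().Lookup("Pair").Type().(*types.Named)

	// Substitute the type parameter T with string and rewrite Pair's
	// underlying struct accordingly.
	s := subst.New(types.NewContext(), pair.TypeParams(), []types.Type{types.Typ[types.String]})
	fmt.Println(s.Type(pair.Underlying())) // struct{First string; Second string}
}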
+func (subst *subster) wellFormed() { + if subst == nil { + return + } + // Check that all of the type params do not appear in the arguments. + s := make(map[types.Type]bool, len(subst.replacements)) + for tparam := range subst.replacements { + s[tparam] = true + } + for _, r := range subst.replacements { + if reaches(r, s) { + panic(subst) + } + } +} + +// typ returns the type of t with the type parameter tparams[i] substituted +// for the type targs[i] where subst was created using tparams and targs. +func (subst *subster) typ(t types.Type) (res types.Type) { + if subst == nil { + return t // A nil subst is type preserving. + } + if r, ok := subst.cache[t]; ok { + return r + } + defer func() { + subst.cache[t] = res + }() + + // fall through if result r will be identical to t, types.Identical(r, t). + switch t := t.(type) { + case *types.TypeParam: + // GOPHERJS: Replaced an assert that was causing a panic for nested types with code from + // https://cs.opensource.google/go/x/tools/+/refs/tags/v0.33.0:go/ssa/subst.go;l=92 + if r := subst.replacements[t]; r != nil { + return r + } + return t + + case *types.Basic: + return t + + case *types.Array: + if r := subst.typ(t.Elem()); r != t.Elem() { + return types.NewArray(r, t.Len()) + } + return t + + case *types.Slice: + if r := subst.typ(t.Elem()); r != t.Elem() { + return types.NewSlice(r) + } + return t + + case *types.Pointer: + if r := subst.typ(t.Elem()); r != t.Elem() { + return types.NewPointer(r) + } + return t + + case *types.Tuple: + return subst.tuple(t) + + case *types.Struct: + return subst.struct_(t) + + case *types.Map: + key := subst.typ(t.Key()) + elem := subst.typ(t.Elem()) + if key != t.Key() || elem != t.Elem() { + return types.NewMap(key, elem) + } + return t + + case *types.Chan: + if elem := subst.typ(t.Elem()); elem != t.Elem() { + return types.NewChan(t.Dir(), elem) + } + return t + + case *types.Signature: + return subst.signature(t) + + case *types.Union: + return subst.union(t) + + case *types.Interface: + return subst.interface_(t) + + case *types.Named: + return subst.named(t) + + default: + panic("unreachable") + } +} + +// types returns the result of {subst.typ(ts[i])}. +func (subst *subster) types(ts []types.Type) []types.Type { + res := make([]types.Type, len(ts)) + for i := range ts { + res[i] = subst.typ(ts[i]) + } + return res +} + +func (subst *subster) tuple(t *types.Tuple) *types.Tuple { + if t != nil { + if vars := subst.varlist(t); vars != nil { + return types.NewTuple(vars...) + } + } + return t +} + +type varlist interface { + At(i int) *types.Var + Len() int +} + +// fieldlist is an adapter for structs for the varlist interface. +type fieldlist struct { + str *types.Struct +} + +func (fl fieldlist) At(i int) *types.Var { return fl.str.Field(i) } +func (fl fieldlist) Len() int { return fl.str.NumFields() } + +func (subst *subster) struct_(t *types.Struct) *types.Struct { + if t != nil { + if fields := subst.varlist(fieldlist{t}); fields != nil { + tags := make([]string, t.NumFields()) + for i, n := 0, t.NumFields(); i < n; i++ { + tags[i] = t.Tag(i) + } + return types.NewStruct(fields, tags) + } + } + return t +} + +// varlist reutrns subst(in[i]) or return nils if subst(v[i]) == v[i] for all i. 
+func (subst *subster) varlist(in varlist) []*types.Var { + var out []*types.Var // nil => no updates + for i, n := 0, in.Len(); i < n; i++ { + v := in.At(i) + w := subst.var_(v) + if v != w && out == nil { + out = make([]*types.Var, n) + for j := 0; j < i; j++ { + out[j] = in.At(j) + } + } + if out != nil { + out[i] = w + } + } + return out +} + +func (subst *subster) var_(v *types.Var) *types.Var { + if v != nil { + if typ := subst.typ(v.Type()); typ != v.Type() { + if v.IsField() { + return types.NewField(v.Pos(), v.Pkg(), v.Name(), typ, v.Embedded()) + } + return types.NewVar(v.Pos(), v.Pkg(), v.Name(), typ) + } + } + return v +} + +func (subst *subster) union(u *types.Union) *types.Union { + var out []*types.Term // nil => no updates + + for i, n := 0, u.Len(); i < n; i++ { + t := u.Term(i) + r := subst.typ(t.Type()) + if r != t.Type() && out == nil { + out = make([]*types.Term, n) + for j := 0; j < i; j++ { + out[j] = u.Term(j) + } + } + if out != nil { + out[i] = types.NewTerm(t.Tilde(), r) + } + } + + if out != nil { + return types.NewUnion(out) + } + return u +} + +func (subst *subster) interface_(iface *types.Interface) *types.Interface { + if iface == nil { + return nil + } + + // methods for the interface. Initially nil if there is no known change needed. + // Signatures for the method where recv is nil. NewInterfaceType fills in the receivers. + var methods []*types.Func + initMethods := func(n int) { // copy first n explicit methods + methods = make([]*types.Func, iface.NumExplicitMethods()) + for i := 0; i < n; i++ { + f := iface.ExplicitMethod(i) + norecv := changeRecv(f.Type().(*types.Signature), nil) + methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv) + } + } + for i := 0; i < iface.NumExplicitMethods(); i++ { + f := iface.ExplicitMethod(i) + // On interfaces, we need to cycle break on anonymous interface types + // being in a cycle with their signatures being in cycles with their receivers + // that do not go through a Named. + norecv := changeRecv(f.Type().(*types.Signature), nil) + sig := subst.typ(norecv) + if sig != norecv && methods == nil { + initMethods(i) + } + if methods != nil { + methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), sig.(*types.Signature)) + } + } + + var embeds []types.Type + initEmbeds := func(n int) { // copy first n embedded types + embeds = make([]types.Type, iface.NumEmbeddeds()) + for i := 0; i < n; i++ { + embeds[i] = iface.EmbeddedType(i) + } + } + for i := 0; i < iface.NumEmbeddeds(); i++ { + e := iface.EmbeddedType(i) + r := subst.typ(e) + if e != r && embeds == nil { + initEmbeds(i) + } + if embeds != nil { + embeds[i] = r + } + } + + if methods == nil && embeds == nil { + return iface + } + if methods == nil { + initMethods(iface.NumExplicitMethods()) + } + if embeds == nil { + initEmbeds(iface.NumEmbeddeds()) + } + return types.NewInterfaceType(methods, embeds).Complete() +} + +func (subst *subster) named(t *types.Named) types.Type { + // A named type may be: + // (1) ordinary named type (non-local scope, no type parameters, no type arguments), + // (2) locally scoped type, + // (3) generic (type parameters but no type arguments), or + // (4) instantiated (type parameters and type arguments). + tparams := t.TypeParams() + if tparams.Len() == 0 { + if subst.scope != nil && !subst.scope.Contains(t.Obj().Pos()) { + // Outside the current function scope? + return t // case (1) ordinary + } + + // case (2) locally scoped type. + // Create a new named type to represent this instantiation. 
+ // We assume that local types of distinct instantiations of a + // generic function are distinct, even if they don't refer to + // type parameters, but the spec is unclear; see golang/go#58573. + // + // Subtle: We short circuit substitution and use a newly created type in + // subst, i.e. cache[t]=n, to pre-emptively replace t with n in recursive + // types during traversal. This both breaks infinite cycles and allows for + // constructing types with the replacement applied in subst.typ(under). + // + // Example: + // func foo[T any]() { + // type linkedlist struct { + // next *linkedlist + // val T + // } + // } + // + // When the field `next *linkedlist` is visited during subst.typ(under), + // we want the substituted type for the field `next` to be `*n`. + n := types.NewNamed(t.Obj(), nil, nil) + subst.cache[t] = n + subst.cache[n] = n + n.SetUnderlying(subst.typ(t.Underlying())) + return n + } + targs := t.TypeArgs() + + // insts are arguments to instantiate using. + insts := make([]types.Type, tparams.Len()) + + // case (3) generic ==> targs.Len() == 0 + // Instantiating a generic with no type arguments should be unreachable. + // Please report a bug if you encounter this. + assert(targs.Len() != 0, "substition into a generic Named type is currently unsupported") + + // case (4) instantiated. + // Substitute into the type arguments and instantiate the replacements/ + // Example: + // type N[A any] func() A + // func Foo[T](g N[T]) {} + // To instantiate Foo[string], one goes through {T->string}. To get the type of g + // one subsitutes T with string in {N with typeargs == {T} and typeparams == {A} } + // to get {N with TypeArgs == {string} and typeparams == {A} }. + assert(targs.Len() == tparams.Len(), "typeargs.Len() must match typeparams.Len() if present") + for i, n := 0, targs.Len(); i < n; i++ { + inst := subst.typ(targs.At(i)) // TODO(generic): Check with rfindley for mutual recursion + insts[i] = inst + } + r, err := types.Instantiate(subst.ctxt, t.Origin(), insts, false) + assert(err == nil, "failed to Instantiate Named type") + return r +} + +func (subst *subster) signature(t *types.Signature) types.Type { + tparams := t.TypeParams() + + // We are choosing not to support tparams.Len() > 0 until a need has been observed in practice. + // + // There are some known usages for types.Types coming from types.{Eval,CheckExpr}. + // To support tparams.Len() > 0, we just need to do the following [psuedocode]: + // targs := {subst.replacements[tparams[i]]]}; Instantiate(ctxt, t, targs, false) + + assert(tparams.Len() == 0, "Substituting types.Signatures with generic functions are currently unsupported.") + + // Either: + // (1)non-generic function. + // no type params to substitute + // (2)generic method and recv needs to be substituted. + + // Receivers can be either: + // named + // pointer to named + // interface + // nil + // interface is the problematic case. We need to cycle break there! + recv := subst.var_(t.Recv()) + params := subst.tuple(t.Params()) + results := subst.tuple(t.Results()) + if recv != t.Recv() || params != t.Params() || results != t.Results() { + return types.NewSignatureType(recv, nil, nil, params, results, t.Variadic()) + } + return t +} + +// reaches returns true if a type t reaches any type t' s.t. c[t'] == true. +// It updates c to cache results. +// +// reaches is currently only part of the wellFormed debug logic, and +// in practice c is initially only type parameters. It is not currently +// relied on in production. 
+func reaches(t types.Type, c map[types.Type]bool) (res bool) { + if c, ok := c[t]; ok { + return c + } + + // c is populated with temporary false entries as types are visited. + // This avoids repeat visits and break cycles. + c[t] = false + defer func() { + c[t] = res + }() + + switch t := t.(type) { + case *types.TypeParam, *types.Basic: + return false + case *types.Array: + return reaches(t.Elem(), c) + case *types.Slice: + return reaches(t.Elem(), c) + case *types.Pointer: + return reaches(t.Elem(), c) + case *types.Tuple: + for i := 0; i < t.Len(); i++ { + if reaches(t.At(i).Type(), c) { + return true + } + } + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + if reaches(t.Field(i).Type(), c) { + return true + } + } + case *types.Map: + return reaches(t.Key(), c) || reaches(t.Elem(), c) + case *types.Chan: + return reaches(t.Elem(), c) + case *types.Signature: + if t.Recv() != nil && reaches(t.Recv().Type(), c) { + return true + } + return reaches(t.Params(), c) || reaches(t.Results(), c) + case *types.Union: + for i := 0; i < t.Len(); i++ { + if reaches(t.Term(i).Type(), c) { + return true + } + } + case *types.Interface: + for i := 0; i < t.NumEmbeddeds(); i++ { + if reaches(t.Embedded(i), c) { + return true + } + } + for i := 0; i < t.NumExplicitMethods(); i++ { + if reaches(t.ExplicitMethod(i).Type(), c) { + return true + } + } + case *types.Named: + return reaches(t.Underlying(), c) + default: + panic("unreachable") + } + return false +} diff --git a/internal/govendor/subst/subst_test.go b/internal/govendor/subst/subst_test.go new file mode 100644 index 000000000..832f0ebd4 --- /dev/null +++ b/internal/govendor/subst/subst_test.go @@ -0,0 +1,104 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Copy of https://cs.opensource.google/go/x/tools/+/refs/tags/v0.17.0:go/ssa/subst_test.go +package subst + +import ( + "go/ast" + "go/parser" + "go/token" + "go/types" + "testing" +) + +func TestSubst(t *testing.T) { + const source = ` +package P + +type t0 int +func (t0) f() +type t1 interface{ f() } +type t2 interface{ g() } +type t3 interface{ ~int } + +func Fn0[T t1](x T) T { + x.f() + return x +} + +type A[T any] [4]T +type B[T any] []T +type C[T, S any] []struct{s S; t T} +type D[T, S any] *struct{s S; t *T} +type E[T, S any] interface{ F() (T, S) } +type F[K comparable, V any] map[K]V +type G[T any] chan *T +type H[T any] func() T +type I[T any] struct{x, y, z int; t T} +type J[T any] interface{ t1 } +type K[T any] interface{ t1; F() T } +type L[T any] interface{ F() T; J[T] } + +var _ L[int] = Fn0[L[int]](nil) +` + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "hello.go", source, 0) + if err != nil { + t.Fatal(err) + } + + var conf types.Config + pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + for _, test := range []struct { + expr string // type expression of Named parameterized type + args []string // type expressions of args for named + want string // expected underlying value after substitution + }{ + {"A", []string{"string"}, "[4]string"}, + {"A", []string{"int"}, "[4]int"}, + {"B", []string{"int"}, "[]int"}, + {"B", []string{"int8"}, "[]int8"}, + {"C", []string{"int8", "string"}, "[]struct{s string; t int8}"}, + {"C", []string{"string", "int8"}, "[]struct{s int8; t string}"}, + {"D", []string{"int16", "string"}, "*struct{s string; t *int16}"}, + {"E", []string{"int32", "string"}, "interface{F() (int32, string)}"}, + {"F", []string{"int64", "string"}, "map[int64]string"}, + {"G", []string{"uint64"}, "chan *uint64"}, + {"H", []string{"uintptr"}, "func() uintptr"}, + {"I", []string{"t0"}, "struct{x int; y int; z int; t P.t0}"}, + {"J", []string{"t0"}, "interface{P.t1}"}, + {"K", []string{"t0"}, "interface{F() P.t0; P.t1}"}, + {"L", []string{"t0"}, "interface{F() P.t0; P.J[P.t0]}"}, + {"L", []string{"L[t0]"}, "interface{F() P.L[P.t0]; P.J[P.L[P.t0]]}"}, + } { + // Eval() expr for its type. + tv, err := types.Eval(fset, pkg, 0, test.expr) + if err != nil { + t.Fatalf("Eval(%s) failed: %v", test.expr, err) + } + // Eval() test.args[i] to get the i'th type arg. + var targs []types.Type + for _, astr := range test.args { + tv, err := types.Eval(fset, pkg, 0, astr) + if err != nil { + t.Fatalf("Eval(%s) failed: %v", astr, err) + } + targs = append(targs, tv.Type) + } + + T := tv.Type.(*types.Named) + + subst := makeSubster(types.NewContext(), nil, T.TypeParams(), targs, true) + sub := subst.typ(T.Underlying()) + if got := sub.String(); got != test.want { + t.Errorf("subst{%v->%v}.typ(%s) = %v, want %v", test.expr, test.args, T.Underlying(), got, test.want) + } + } +} diff --git a/internal/govendor/subst/util.go b/internal/govendor/subst/util.go new file mode 100644 index 000000000..5b55c0310 --- /dev/null +++ b/internal/govendor/subst/util.go @@ -0,0 +1,21 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package subst + +import "go/types" + +// assert panics with the mesage msg if p is false. +// Avoid combining with expensive string formatting. 
+// From https://cs.opensource.google/go/x/tools/+/refs/tags/v0.17.0:go/ssa/util.go;l=27 +func assert(p bool, msg string) { + if !p { + panic(msg) + } +} + +// From https://cs.opensource.google/go/x/tools/+/refs/tags/v0.33.0:go/ssa/wrappers.go;l=262 +func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { + return types.NewSignatureType(recv, nil, nil, s.Params(), s.Results(), s.Variadic()) +} diff --git a/internal/srctesting/srctesting.go b/internal/srctesting/srctesting.go index 4e374845e..e4242991c 100644 --- a/internal/srctesting/srctesting.go +++ b/internal/srctesting/srctesting.go @@ -4,48 +4,89 @@ package srctesting import ( "bytes" + "fmt" "go/ast" "go/format" "go/parser" "go/token" "go/types" + "path/filepath" + "strings" "testing" + + "golang.org/x/tools/go/packages" ) -// Parse source from the string and return complete AST. -// -// Assumes source file name `test.go`. Fails the test on parsing error. -func Parse(t *testing.T, fset *token.FileSet, src string) *ast.File { - t.Helper() - f, err := parser.ParseFile(fset, "test.go", src, parser.ParseComments) - if err != nil { - t.Fatalf("Failed to parse test source: %s", err) - } - return f +// Fixture provides utilities for parsing and type checking Go code in tests. +type Fixture struct { + T *testing.T + FileSet *token.FileSet + Info *types.Info + Packages map[string]*types.Package } -// Check type correctness of the provided AST. -// -// Assumes "test" package import path. Fails the test if type checking fails. -// Provided AST is expected not to have any imports. -func Check(t *testing.T, fset *token.FileSet, files ...*ast.File) (*types.Info, *types.Package) { - t.Helper() - typesInfo := &types.Info{ +func newInfo() *types.Info { + return &types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), Implicits: make(map[ast.Node]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), Scopes: make(map[ast.Node]*types.Scope), + Instances: make(map[*ast.Ident]types.Instance), + } +} + +// New creates a fresh Fixture. +func New(t *testing.T) *Fixture { + return &Fixture{ + T: t, + FileSet: token.NewFileSet(), + Info: newInfo(), + Packages: map[string]*types.Package{}, } +} + +// Parse source from the string and return complete AST. +func (f *Fixture) Parse(name, src string) *ast.File { + f.T.Helper() + file, err := parser.ParseFile(f.FileSet, name, src, parser.ParseComments) + if err != nil { + f.T.Fatalf("Failed to parse test source: %s", err) + } + return file +} + +// Check type correctness of the provided AST. +// +// Fails the test if type checking fails. Provided AST is expected not to have +// any imports. If f.Info is nil, it will create a new types.Info instance +// to store type checking results and return it, otherwise f.Info is used. 
+func (f *Fixture) Check(importPath string, files ...*ast.File) (*types.Info, *types.Package) { + f.T.Helper() config := &types.Config{ - Sizes: &types.StdSizes{WordSize: 4, MaxAlign: 8}, + Sizes: &types.StdSizes{WordSize: 4, MaxAlign: 8}, + Importer: f, + } + info := f.Info + if info == nil { + info = newInfo() } - typesPkg, err := config.Check("test", fset, files, typesInfo) + pkg, err := config.Check(importPath, f.FileSet, files, info) if err != nil { - t.Fatalf("Filed to type check test source: %s", err) + f.T.Fatalf("Failed to type check test source: %s", err) } - return typesInfo, typesPkg + f.Packages[importPath] = pkg + return info, pkg +} + +// Import implements types.Importer. +func (f *Fixture) Import(path string) (*types.Package, error) { + pkg, ok := f.Packages[path] + if !ok { + return nil, fmt.Errorf("missing type info for package %q", path) + } + return pkg, nil } // ParseFuncDecl parses source with a single function defined and returns the @@ -68,8 +109,7 @@ func ParseFuncDecl(t *testing.T, src string) *ast.FuncDecl { // Fails the test if there isn't exactly one declaration in the source. func ParseDecl(t *testing.T, src string) ast.Decl { t.Helper() - fset := token.NewFileSet() - file := Parse(t, fset, src) + file := New(t).Parse("test.go", src) if l := len(file.Decls); l != 1 { t.Fatalf(`Got %d decls in the sources, expected exactly 1`, l) } @@ -107,3 +147,138 @@ func Format(t *testing.T, fset *token.FileSet, node any) string { } return buf.String() } + +// LookupObj returns a top-level object with the given name. +// +// Methods can be referred to as RecvTypeName.MethodName. +func LookupObj(pkg *types.Package, name string) types.Object { + path := strings.Split(name, ".") + scope := pkg.Scope() + var obj types.Object + + for len(path) > 0 { + obj = scope.Lookup(path[0]) + if obj == nil { + panic(fmt.Sprintf("failed to find %q in %q", path[0], name)) + } + path = path[1:] + + if fun, ok := obj.(*types.Func); ok { + scope = fun.Scope() + continue + } + + // If we are here, the latest object is a named type. If there are more path + // elements left, they must refer to field or method. + if len(path) > 0 { + obj, _, _ = types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), path[0]) + path = path[1:] + if fun, ok := obj.(*types.Func); ok { + scope = fun.Scope() + } + } + } + return obj +} + +type Source struct { + Name string + Contents []byte +} + +// ParseSources parses the given source files and returns the root package +// that contains the given source files. +// +// The source file should all be from the same package as the files for the +// root package. At least one source file must be given. +// The root package's path will be `command-line-arguments`. +// +// The auxiliary files can be for different packages but should have paths +// added to the source name so that they can be grouped together by package. +// To import an auxiliary package, the path should be prepended by +// `github.com/gopherjs/gopherjs/compiler`. 
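// Illustrative sketch (editor's addition, not part of the patch itself): the
// intended use of ParseSources, documented above, together with LookupObj's
// "RecvTypeName.MethodName" form. The file name and contents are invented for
// illustration; the test must live inside the gopherjs module.
package srctesting_test

import (
	"go/types"
	"testing"

	"github.com/gopherjs/gopherjs/internal/srctesting"
)

func TestLookupMethodSketch(t *testing.T) {
	root := srctesting.ParseSources(t, []srctesting.Source{{
		Name: "counter.go",
		Contents: []byte(`package main

type Counter int

func (c Counter) Inc() Counter { return c + 1 }

func main() {}
`),
	}}, nil)

	// The root package is loaded under the path "command-line-arguments".
	obj := srctesting.LookupObj(root.Types, "Counter.Inc")
	if _, ok := obj.(*types.Func); !ok {
		t.Fatalf("Got: %v. Want: *types.Func for Counter.Inc.", obj)
	}
}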
+func ParseSources(t *testing.T, sourceFiles []Source, auxFiles []Source) *packages.Package { + t.Helper() + const mode = packages.NeedName | + packages.NeedFiles | + packages.NeedImports | + packages.NeedDeps | + packages.NeedTypes | + packages.NeedSyntax + + dir, err := filepath.Abs(`./`) + if err != nil { + t.Fatal(`error getting working directory:`, err) + } + + patterns := make([]string, len(sourceFiles)) + overlay := make(map[string][]byte, len(sourceFiles)) + for i, src := range sourceFiles { + filename := src.Name + patterns[i] = filename + absName := filepath.Join(dir, filename) + overlay[absName] = []byte(src.Contents) + } + for _, src := range auxFiles { + absName := filepath.Join(dir, src.Name) + overlay[absName] = []byte(src.Contents) + } + + config := &packages.Config{ + Mode: mode, + Overlay: overlay, + Dir: dir, + } + + pkgs, err := packages.Load(config, patterns...) + if err != nil { + t.Fatal(`error loading packages:`, err) + } + + hasErrors := false + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + for _, err := range pkg.Errors { + hasErrors = true + t.Error(err) + } + }) + if hasErrors { + t.FailNow() + } + + if len(pkgs) != 1 { + t.Fatal(`expected one and only one root package but got`, len(pkgs)) + } + return pkgs[0] +} + +// GetNodeAtLineNo returns the first node of type N that starts on the given +// line in the given file. This helps lookup nodes that aren't named but +// are needed by a specific test. +func GetNodeAtLineNo[N ast.Node](file *ast.File, fSet *token.FileSet, lineNo int) N { + var node N + keepLooking := true + ast.Inspect(file, func(n ast.Node) bool { + if n == nil || !keepLooking { + return false + } + nodeLine := fSet.Position(n.Pos()).Line + switch { + case nodeLine < lineNo: + // We haven't reached the line yet, so check if we can skip over + // this whole node or if we should look inside it. + return fSet.Position(n.End()).Line >= lineNo + case nodeLine > lineNo: + // We went past it without finding it, so stop looking. + keepLooking = false + return false + default: // nodeLine == lineNo + if n, ok := n.(N); ok { + node = n + keepLooking = false + } + return keepLooking + } + }) + return node +} diff --git a/internal/srctesting/srctesting_test.go b/internal/srctesting/srctesting_test.go new file mode 100644 index 000000000..44fa51ead --- /dev/null +++ b/internal/srctesting/srctesting_test.go @@ -0,0 +1,28 @@ +package srctesting + +import "testing" + +func TestFixture(t *testing.T) { + f := New(t) + + const src1 = `package foo + type X int + ` + _, foo := f.Check("pkg/foo", f.Parse("foo.go", src1)) + + if !foo.Complete() { + t.Fatalf("Got: incomplete package pkg/foo: %s. Want: complete package.", foo) + } + + const src2 = `package bar + import "pkg/foo" + func Fun() foo.X { return 0 } + ` + + // Should type check successfully with dependency on pkg/foo. + _, bar := f.Check("pkg/bar", f.Parse("bar.go", src2)) + + if !bar.Complete() { + t.Fatalf("Got: incomplete package pkg/bar: %s. Want: complete package.", foo) + } +} diff --git a/internal/testingx/must.go b/internal/testingx/must.go new file mode 100644 index 000000000..62d27dce8 --- /dev/null +++ b/internal/testingx/must.go @@ -0,0 +1,24 @@ +// Package testingx provides helpers for use with the testing package. +package testingx + +import "testing" + +// Must provides a concise way to handle returned error in cases that +// "should never happen"©. +// +// This function can be used in test case setup that can be presumed to be +// correct, but technically may return an error. 
This function MUST NOT be used +// to check for test case conditions themselves because it generates a generic, +// nondescript test error message. +// +// func startServer(addr string) (*server, err) +// mustServer := testingx.Must[*server](t) +// mustServer(startServer(":8080")) +func Must[T any](t *testing.T) func(v T, err error) T { + return func(v T, err error) T { + if err != nil { + t.Fatalf("Got: unexpected error: %s. Want: no error.", err) + } + return v + } +} diff --git a/internal/testmain/testmain.go b/internal/testmain/testmain.go index f1b3257d5..3de87d382 100644 --- a/internal/testmain/testmain.go +++ b/internal/testmain/testmain.go @@ -5,7 +5,7 @@ import ( "errors" "fmt" "go/ast" - gobuild "go/build" + "go/build" "go/doc" "go/parser" "go/token" @@ -16,7 +16,6 @@ import ( "unicode" "unicode/utf8" - "github.com/gopherjs/gopherjs/build" "golang.org/x/tools/go/buildutil" ) @@ -66,7 +65,8 @@ func (ef ExampleFunc) Executable() bool { // TestMain is a helper type responsible for generation of the test main package. type TestMain struct { - Package *build.PackageData + Package *build.Package + Context *build.Context Tests []TestFunc Benchmarks []TestFunc Fuzz []TestFunc @@ -88,7 +88,7 @@ func (tm *TestMain) Scan(fset *token.FileSet) error { func (tm *TestMain) scanPkg(fset *token.FileSet, files []string, loc FuncLocation) error { for _, name := range files { srcPath := path.Join(tm.Package.Dir, name) - f, err := buildutil.OpenFile(tm.Package.InternalBuildContext(), srcPath) + f, err := buildutil.OpenFile(tm.Context, srcPath) if err != nil { return fmt.Errorf("failed to open source file %q: %w", srcPath, err) } @@ -158,7 +158,7 @@ func (tm *TestMain) scanFile(f *ast.File, loc FuncLocation) error { } // Synthesize main package for the tests. -func (tm *TestMain) Synthesize(fset *token.FileSet) (*build.PackageData, *ast.File, error) { +func (tm *TestMain) Synthesize(fset *token.FileSet) (*build.Package, *ast.File, error) { buf := &bytes.Buffer{} if err := testmainTmpl.Execute(buf, tm); err != nil { return nil, nil, fmt.Errorf("failed to generate testmain source for package %s: %w", tm.Package.ImportPath, err) @@ -167,12 +167,10 @@ func (tm *TestMain) Synthesize(fset *token.FileSet) (*build.PackageData, *ast.Fi if err != nil { return nil, nil, fmt.Errorf("failed to parse testmain source for package %s: %w", tm.Package.ImportPath, err) } - pkg := &build.PackageData{ - Package: &gobuild.Package{ - ImportPath: tm.Package.ImportPath + ".testmain", - Name: "main", - GoFiles: []string{"_testmain.go"}, - }, + pkg := &build.Package{ + ImportPath: tm.Package.ImportPath + ".testmain", + Name: "main", + GoFiles: []string{"_testmain.go"}, } return pkg, src, nil } diff --git a/internal/testmain/testmain_test.go b/internal/testmain/testmain_test.go index 01c92cc76..8e0b268d2 100644 --- a/internal/testmain/testmain_test.go +++ b/internal/testmain/testmain_test.go @@ -7,6 +7,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" + "github.com/gopherjs/gopherjs/build" "github.com/gopherjs/gopherjs/internal/srctesting" . "github.com/gopherjs/gopherjs/internal/testmain" @@ -21,7 +22,10 @@ func TestScan(t *testing.T) { fset := token.NewFileSet() - got := TestMain{Package: pkg} + got := TestMain{ + Package: pkg.Package, + Context: pkg.InternalBuildContext(), + } if err := got.Scan(fset); err != nil { t.Fatalf("Got: tm.Scan() returned error: %s. 
Want: no error.", err) } @@ -47,6 +51,7 @@ func TestScan(t *testing.T) { } opts := cmp.Options{ cmpopts.IgnoreFields(TestMain{}, "Package"), // Inputs. + cmpopts.IgnoreFields(TestMain{}, "Context"), } if diff := cmp.Diff(want, got, opts...); diff != "" { t.Errorf("List of test function is different from expected (-want,+got):\n%s", diff) @@ -54,9 +59,7 @@ func TestScan(t *testing.T) { } func TestSynthesize(t *testing.T) { - pkg := &build.PackageData{ - Package: &gobuild.Package{ImportPath: "foo/bar"}, - } + pkg := &gobuild.Package{ImportPath: "foo/bar"} tests := []struct { descr string diff --git a/node-syscall/package-lock.json b/node-syscall/package-lock.json index a92b06df6..df06938c6 100644 --- a/node-syscall/package-lock.json +++ b/node-syscall/package-lock.json @@ -1,92 +1,136 @@ { "name": "syscall", + "lockfileVersion": 3, "requires": true, - "lockfileVersion": 1, - "dependencies": { - "@npmcli/move-file": { + "packages": { + "": { + "name": "syscall", + "hasInstallScript": true, + "license": "BSD-2-Clause", + "dependencies": { + "node-gyp": "^8.1.0" + } + }, + "node_modules/@gar/promisify": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", + "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==" + }, + "node_modules/@npmcli/fs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz", + "integrity": "sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==", + "dependencies": { + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" + } + }, + "node_modules/@npmcli/move-file": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", "integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", - "requires": { + "deprecated": "This functionality has been moved to @npmcli/fs", + "dependencies": { "mkdirp": "^1.0.4", "rimraf": "^3.0.2" + }, + "engines": { + "node": ">=10" } }, - "@tootallnate/once": { + "node_modules/@tootallnate/once": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", - "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==" + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "engines": { + "node": ">= 6" + } }, - "abbrev": { + "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" }, - "agent-base": { + "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "requires": { + "dependencies": { "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" } }, - "agentkeepalive": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.1.4.tgz", - "integrity": "sha512-+V/rGa3EuU74H6wR04plBb7Ks10FbtUQgRj/FQOG7uUIEuaINI+AiqJR1k6t3SVNs7o7ZjIdus6706qqzVq8jQ==", - "requires": { - "debug": "^4.1.0", - "depd": "^1.1.2", + "node_modules/agentkeepalive": { + "version": "4.5.0", + "resolved": 
"https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", + "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", + "dependencies": { "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" } }, - "aggregate-error": { + "node_modules/aggregate-error": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "requires": { + "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" } }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } }, - "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" }, - "are-we-there-yet": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", - "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", - "requires": { + "node_modules/are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "deprecated": "This package is no longer supported.", + "dependencies": { "delegates": "^1.0.0", - "readable-stream": "^2.0.6" + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "balanced-match": { + "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, - "brace-expansion": { + "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { + "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, - "cacache": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz", - "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==", - "requires": { + "node_modules/cacache": { + "version": "15.3.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", + "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", + "dependencies": { + "@npmcli/fs": "^1.0.0", "@npmcli/move-file": "^1.0.1", 
"chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -104,239 +148,302 @@ "ssri": "^8.0.1", "tar": "^6.0.2", "unique-filename": "^1.1.1" + }, + "engines": { + "node": ">= 10" } }, - "chownr": { + "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "engines": { + "node": ">=10" + } }, - "clean-stack": { + "node_modules/clean-stack": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "bin": { + "color-support": "bin.js" + } }, - "concat-map": { + "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, - "console-control-strings": { + "node_modules/console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" - }, - "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", - "requires": { + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dependencies": { "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "delegates": { + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" }, - "depd": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, - "encoding": { + "node_modules/encoding": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", "optional": true, - "requires": { + "dependencies": { "iconv-lite": "^0.6.2" } }, - "env-paths": { + "node_modules/env-paths": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", - "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==" + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "engines": { + "node": ">=6" + } }, - "err-code": { + "node_modules/err-code": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" }, - "fs-minipass": { + "node_modules/fs-minipass": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "requires": { + "dependencies": { "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" } }, - "fs.realpath": { + "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "requires": { + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "deprecated": "This package is no longer supported.", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no 
longer supported", + "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "graceful-fs": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==" + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, - "has-unicode": { + "node_modules/has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" }, - "http-cache-semantics": { + "node_modules/http-cache-semantics": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" }, - "http-proxy-agent": { + "node_modules/http-proxy-agent": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", - "requires": { + "dependencies": { "@tootallnate/once": "1", "agent-base": "6", "debug": "4" + }, + "engines": { + "node": ">= 6" } }, - "https-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", - "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", - "requires": { + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dependencies": { "agent-base": "6", "debug": "4" + }, + "engines": { + "node": ">= 6" } }, - "humanize-ms": { + "node_modules/humanize-ms": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0=", - "requires": { + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", + "dependencies": { "ms": "^2.0.0" } }, - "iconv-lite": { + "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "optional": true, - "requires": { + "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" } }, - "imurmurhash": { + "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "engines": { + "node": 
">=0.8.19" + } }, - "indent-string": { + "node_modules/indent-string": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } }, - "infer-owner": { + "node_modules/infer-owner": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==" }, - "inflight": { + "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "requires": { + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, - "inherits": { + "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, - "ip": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", - "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=" + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" } }, - "is-lambda": { + "node_modules/is-lambda": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU=" - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==" }, - "isexe": { + "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/jsbn": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": 
"sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==" }, - "lru-cache": { + "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { + "dependencies": { "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" } }, - "make-fetch-happen": { - "version": "8.0.14", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", - "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==", - "requires": { + "node_modules/make-fetch-happen": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz", + "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==", + "dependencies": { "agentkeepalive": "^4.1.3", - "cacache": "^15.0.5", + "cacache": "^15.2.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", @@ -347,345 +454,470 @@ "minipass-fetch": "^1.3.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", "promise-retry": "^2.0.1", - "socks-proxy-agent": "^5.0.0", + "socks-proxy-agent": "^6.0.0", "ssri": "^8.0.0" + }, + "engines": { + "node": ">= 10" } }, - "minimatch": { + "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "requires": { + "dependencies": { "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" } }, - "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", - "requires": { + "node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" } }, - "minipass-collect": { + "node_modules/minipass-collect": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", - "requires": { + "dependencies": { "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" } }, - "minipass-fetch": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", - "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", - "requires": { - "encoding": "^0.1.12", + "node_modules/minipass-fetch": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.4.1.tgz", + "integrity": "sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==", + "dependencies": { "minipass": "^3.1.0", "minipass-sized": "^1.0.3", "minizlib": "^2.0.0" + }, + "engines": { + "node": ">=8" + }, + "optionalDependencies": { + "encoding": "^0.1.12" } }, - "minipass-flush": { + "node_modules/minipass-flush": 
{ "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "requires": { + "dependencies": { "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" } }, - "minipass-pipeline": { + "node_modules/minipass-pipeline": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "requires": { + "dependencies": { "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" } }, - "minipass-sized": { + "node_modules/minipass-sized": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "requires": { + "dependencies": { "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" } }, - "minizlib": { + "node_modules/minizlib": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "requires": { + "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" } }, - "mkdirp": { + "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } }, - "node-gyp": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.1.0.tgz", - "integrity": "sha512-o2elh1qt7YUp3lkMwY3/l4KF3j/A3fI/Qt4NH+CQQgPJdqGE9y7qnP84cjIWN27Q0jJkrSAhCVDg+wBVNBYdBg==", - "requires": { + "node_modules/node-gyp": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", + "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", + "dependencies": { "env-paths": "^2.2.0", "glob": "^7.1.4", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^8.0.14", + "make-fetch-happen": "^9.1.0", "nopt": "^5.0.0", - "npmlog": "^4.1.2", + "npmlog": "^6.0.0", "rimraf": "^3.0.2", "semver": "^7.3.5", - "tar": "^6.1.0", + "tar": "^6.1.2", "which": "^2.0.2" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": ">= 10.12.0" } }, - "nopt": { + "node_modules/nopt": { "version": "5.0.0", "resolved": 
"https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "requires": { + "dependencies": { "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" } }, - "npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" - }, - "once": { + "node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "deprecated": "This package is no longer supported.", + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "requires": { + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { "wrappy": "1" } }, - "p-map": { + "node_modules/p-map": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "requires": { + "dependencies": { "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "path-is-absolute": { + "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" - }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } }, - "promise-inflight": { + "node_modules/promise-inflight": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha1-mEcocL8igTL8vdhoEputEsPAKeM=" + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==" }, - "promise-retry": { + "node_modules/promise-retry": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "requires": { + 
"dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" } }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" } }, - "retry": { + "node_modules/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=" + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "engines": { + "node": ">= 4" + } }, - "rimraf": { + "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dependencies": { "glob": "^7.1.3" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "safer-buffer": { + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "optional": true }, - "semver": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", - "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", - "requires": { - "lru-cache": "^6.0.0" + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, - "set-blocking": { + "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": 
"sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" - }, - "smart-buffer": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.1.0.tgz", - "integrity": "sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw==" - }, - "socks": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.1.tgz", - "integrity": "sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA==", - "requires": { - "ip": "^1.1.5", - "smart-buffer": "^4.1.0" - } - }, - "socks-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-5.0.1.tgz", - "integrity": "sha512-vZdmnjb9a2Tz6WEQVIurybSwElwPxMZaIc7PzqbJTrezcKNznv6giT7J7tZDZ1BojVaa1jvO/UiUdhDVB0ACoQ==", - "requires": { + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", + "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz", + "integrity": "sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==", + "dependencies": { "agent-base": "^6.0.2", - "debug": "4", - "socks": "^2.3.3" + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" } }, - "ssri": { + "node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==" + }, + "node_modules/ssri": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", - "requires": { + "dependencies": { "minipass": "^3.1.1" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - 
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "tar": { - "version": "6.1.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", - "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", - "requires": { + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", - "minipass": "^3.0.0", + "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" } }, - "unique-filename": { + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/unique-filename": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", - "requires": { + "dependencies": { "unique-slug": "^2.0.0" } }, - "unique-slug": { + "node_modules/unique-slug": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", - "requires": { + "dependencies": { "imurmurhash": "^0.1.4" } }, - "util-deprecate": { + "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, - "which": { + "node_modules/which": { "version": "2.0.2", "resolved": 
"https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { + "dependencies": { "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, - "wide-align": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", - "requires": { - "string-width": "^1.0.2 || 2" + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" } }, - "wrappy": { + "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, - "yallist": { + "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" diff --git a/node-syscall/syscall.cc b/node-syscall/syscall.cc index 9f6469c07..be4fb786d 100644 --- a/node-syscall/syscall.cc +++ b/node-syscall/syscall.cc @@ -7,30 +7,25 @@ #include #include #include +#include using namespace v8; -#if NODE_MAJOR_VERSION == 0 -#define ARRAY_BUFFER_DATA_OFFSET 23 -#else -#define ARRAY_BUFFER_DATA_OFFSET 31 -#endif - // arena stores buffers we allocate for data passed to syscalls. // // This object lives for the duration of Syscall() or Syscall6() and correctly // frees all allocated buffers at the end. This is necessary to avoid memory // leaks on each call. class arena { - std::vector> allocs_; + std::vector> allocs_; public: arena() = default; virtual ~arena() = default; arena(const arena& a) = delete; - intptr_t* allocate(size_t n) { - allocs_.emplace_back(new intptr_t[n]); - return allocs_.end()->get(); + void* allocate(size_t n) { + allocs_.emplace_back(n); // Allocate a new vector of n byte size. + return allocs_[allocs_.size() - 1].data(); // Return the pointer to its data buffer; } }; @@ -46,17 +41,20 @@ Local integerOrDie(Local ctx, Local value) { throw std::runtime_error("expected integer, got something else"); } +// Transforms a JS value into a native value that can be passed to the syscall() call. 
intptr_t toNative(Local ctx, arena& a, Local value) { if (value.IsEmpty()) { return 0; } if (value->IsArrayBufferView()) { Local view = Local::Cast(value); - return *reinterpret_cast(*reinterpret_cast(*view->Buffer()) + ARRAY_BUFFER_DATA_OFFSET) + view->ByteOffset(); // ugly hack, because of https://codereview.chromium.org/25221002 + void* native = a.allocate(view->ByteLength()); + view->CopyContents(native, view->ByteLength()); + return reinterpret_cast(native); } if (value->IsArray()) { Local array = Local::Cast(value); - intptr_t* native = a.allocate(array->Length()); + intptr_t* native = reinterpret_cast(a.allocate(array->Length() * sizeof(intptr_t))); for (uint32_t i = 0; i < array->Length(); i++) { native[i] = toNative(ctx, a, array->Get(ctx, i).ToLocalChecked()); } @@ -142,9 +140,10 @@ void Syscall6(const FunctionCallbackInfo& info) { } } -void init(Local exports) { +extern "C" NODE_MODULE_EXPORT void +NODE_MODULE_INITIALIZER(Local exports, + Local module, + Local context) { NODE_SET_METHOD(exports, "Syscall", Syscall); NODE_SET_METHOD(exports, "Syscall6", Syscall6); } - -NODE_MODULE(syscall, init); diff --git a/package-lock.json b/package-lock.json index 21b8f5864..b8ba5e000 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,164 +1,36 @@ { "name": "gopherjs", - "lockfileVersion": 2, + "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "gopherjs", "license": "BSD-2-Clause", - "dependencies": { - "source-map-support": "^0.5.19" - }, - "devDependencies": { - "uglify-es": "^3.3.9" - }, "optionalDependencies": { "syscall": "file:./node-syscall" } }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" - }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "optional": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/commander": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.13.0.tgz", - "integrity": "sha512-MVuS359B+YzaWqjCL/c+22gfryv+mCBPHAv3zyVI2GN8EY6IRP8VwtasXn8jyyhvvq84R4ImN1OKRtcbIasjYA==", - "dev": true - }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "optional": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.5.tgz", - "integrity": "sha512-+8NzxD82XQoNKNrl1d/FSi+X8wAEWR+sbYAfIvub4Nz0d22plFG72CEVVaufV8PNf4qSslFTD8VMOxNVhHCjTw==", - "optional": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "optional": true, - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "optional": true, - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.19", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", - "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/syscall": { - "resolved": "node-syscall", - "link": true - }, - "node_modules/tar": { - "version": "6.1.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", - "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", - "optional": true, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^3.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/uglify-es": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.3.9.tgz", - "integrity": "sha512-r+MU0rfv4L/0eeW3xZrd16t4NZfK8Ld4SWVglYBb7ez5uXFWHuVRs6xCTrf1yirs9a4j4Y27nn7SRfO6v67XsQ==", - "deprecated": "support for ECMAScript is superseded by `uglify-js` as of v3.13.0", - "dev": true, - "dependencies": { - "commander": "~2.13.0", - "source-map": "~0.6.1" - }, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "node_modules/@gar/promisify": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", + "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", "optional": true }, - "node-syscall": { - "name": "syscall", - "hasInstallScript": true, - "license": "BSD-2-Clause", + "node_modules/@npmcli/fs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz", + "integrity": "sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==", "optional": true, "dependencies": { - "node-gyp": "^8.1.0" + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" } }, - "node-syscall/node_modules/@npmcli/move-file": { + "node_modules/@npmcli/move-file": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", "integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", + "deprecated": "This functionality has been moved to @npmcli/fs", "optional": true, "dependencies": { "mkdirp": "^1.0.4", @@ -168,7 +40,7 @@ "node": ">=10" } }, - "node-syscall/node_modules/@tootallnate/once": { + "node_modules/@tootallnate/once": { "version": "1.1.2", "resolved": 
"https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", @@ -177,13 +49,13 @@ "node": ">= 6" } }, - "node-syscall/node_modules/abbrev": { + "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "optional": true }, - "node-syscall/node_modules/agent-base": { + "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", @@ -195,21 +67,19 @@ "node": ">= 6.0.0" } }, - "node-syscall/node_modules/agentkeepalive": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.1.4.tgz", - "integrity": "sha512-+V/rGa3EuU74H6wR04plBb7Ks10FbtUQgRj/FQOG7uUIEuaINI+AiqJR1k6t3SVNs7o7ZjIdus6706qqzVq8jQ==", + "node_modules/agentkeepalive": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", + "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", "optional": true, "dependencies": { - "debug": "^4.1.0", - "depd": "^1.1.2", "humanize-ms": "^1.2.1" }, "engines": { "node": ">= 8.0.0" } }, - "node-syscall/node_modules/aggregate-error": { + "node_modules/aggregate-error": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", @@ -222,38 +92,42 @@ "node": ">=8" } }, - "node-syscall/node_modules/ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "optional": true, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node-syscall/node_modules/aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", "optional": true }, - "node-syscall/node_modules/are-we-there-yet": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", - "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", + "node_modules/are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "deprecated": "This package is no longer supported.", "optional": true, "dependencies": { "delegates": "^1.0.0", - "readable-stream": "^2.0.6" + 
"readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node-syscall/node_modules/balanced-match": { + "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "optional": true }, - "node-syscall/node_modules/brace-expansion": { + "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", @@ -263,12 +137,13 @@ "concat-map": "0.0.1" } }, - "node-syscall/node_modules/cacache": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz", - "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==", + "node_modules/cacache": { + "version": "15.3.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", + "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", "optional": true, "dependencies": { + "@npmcli/fs": "^1.0.0", "@npmcli/move-file": "^1.0.1", "chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -291,7 +166,16 @@ "node": ">= 10" } }, - "node-syscall/node_modules/clean-stack": { + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "optional": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/clean-stack": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", @@ -300,37 +184,31 @@ "node": ">=6" } }, - "node-syscall/node_modules/code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "optional": true, - "engines": { - "node": ">=0.10.0" + "bin": { + "color-support": "bin.js" } }, - "node-syscall/node_modules/concat-map": { + "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "optional": true }, - "node-syscall/node_modules/console-control-strings": { + "node_modules/console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "optional": true - }, - "node-syscall/node_modules/core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "integrity": 
"sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", "optional": true }, - "node-syscall/node_modules/debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", "optional": true, "dependencies": { "ms": "2.1.2" @@ -344,22 +222,19 @@ } } }, - "node-syscall/node_modules/delegates": { + "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", "optional": true }, - "node-syscall/node_modules/depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", - "optional": true, - "engines": { - "node": ">= 0.6" - } + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "optional": true }, - "node-syscall/node_modules/encoding": { + "node_modules/encoding": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", @@ -368,7 +243,7 @@ "iconv-lite": "^0.6.2" } }, - "node-syscall/node_modules/env-paths": { + "node_modules/env-paths": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", @@ -377,44 +252,61 @@ "node": ">=6" } }, - "node-syscall/node_modules/err-code": { + "node_modules/err-code": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", "optional": true }, - "node-syscall/node_modules/fs.realpath": { + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "optional": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "optional": true }, - "node-syscall/node_modules/gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": 
"sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "deprecated": "This package is no longer supported.", "optional": true, "dependencies": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node-syscall/node_modules/glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", "optional": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, @@ -425,25 +317,25 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node-syscall/node_modules/graceful-fs": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==", + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "optional": true }, - "node-syscall/node_modules/has-unicode": { + "node_modules/has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", "optional": true }, - "node-syscall/node_modules/http-cache-semantics": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", - "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==", + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", "optional": true }, - "node-syscall/node_modules/http-proxy-agent": { + "node_modules/http-proxy-agent": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", @@ -457,10 +349,10 @@ "node": ">= 6" } }, - "node-syscall/node_modules/https-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", - "integrity": 
"sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "optional": true, "dependencies": { "agent-base": "6", @@ -470,16 +362,16 @@ "node": ">= 6" } }, - "node-syscall/node_modules/humanize-ms": { + "node_modules/humanize-ms": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0=", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", "optional": true, "dependencies": { "ms": "^2.0.0" } }, - "node-syscall/node_modules/iconv-lite": { + "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", @@ -491,16 +383,16 @@ "node": ">=0.10.0" } }, - "node-syscall/node_modules/imurmurhash": { + "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "optional": true, "engines": { "node": ">=0.8.19" } }, - "node-syscall/node_modules/indent-string": { + "node_modules/indent-string": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", @@ -509,65 +401,70 @@ "node": ">=8" } }, - "node-syscall/node_modules/infer-owner": { + "node_modules/infer-owner": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", "optional": true }, - "node-syscall/node_modules/inflight": { + "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "optional": true, "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, - "node-syscall/node_modules/inherits": { + "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "optional": true }, - "node-syscall/node_modules/ip": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", - "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", - "optional": true - }, - "node-syscall/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", "optional": true, "dependencies": { - "number-is-nan": "^1.0.0" + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" }, "engines": { - "node": ">=0.10.0" + "node": ">= 12" } }, - "node-syscall/node_modules/is-lambda": { + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lambda": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU=", - "optional": true - }, - "node-syscall/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", "optional": true }, - "node-syscall/node_modules/isexe": { + "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "optional": true + }, + "node_modules/jsbn": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", "optional": true }, - "node-syscall/node_modules/lru-cache": { + "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", @@ -579,14 +476,14 @@ "node": ">=10" } }, - "node-syscall/node_modules/make-fetch-happen": { - "version": "8.0.14", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", - "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==", + "node_modules/make-fetch-happen": { + "version": "9.1.0", + "resolved": 
"https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz", + "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==", "optional": true, "dependencies": { "agentkeepalive": "^4.1.3", - "cacache": "^15.0.5", + "cacache": "^15.2.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", @@ -597,18 +494,19 @@ "minipass-fetch": "^1.3.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", "promise-retry": "^2.0.1", - "socks-proxy-agent": "^5.0.0", + "socks-proxy-agent": "^6.0.0", "ssri": "^8.0.0" }, "engines": { "node": ">= 10" } }, - "node-syscall/node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "optional": true, "dependencies": { "brace-expansion": "^1.1.7" @@ -617,7 +515,19 @@ "node": "*" } }, - "node-syscall/node_modules/minipass-collect": { + "node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "optional": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-collect": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", @@ -629,10 +539,10 @@ "node": ">= 8" } }, - "node-syscall/node_modules/minipass-fetch": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", - "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", + "node_modules/minipass-fetch": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.4.1.tgz", + "integrity": "sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==", "optional": true, "dependencies": { "minipass": "^3.1.0", @@ -646,7 +556,7 @@ "encoding": "^0.1.12" } }, - "node-syscall/node_modules/minipass-flush": { + "node_modules/minipass-flush": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", @@ -658,7 +568,7 @@ "node": ">= 8" } }, - "node-syscall/node_modules/minipass-pipeline": { + "node_modules/minipass-pipeline": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", @@ -670,7 +580,7 @@ "node": ">=8" } }, - "node-syscall/node_modules/minipass-sized": { + "node_modules/minipass-sized": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", "integrity": 
"sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", @@ -682,27 +592,61 @@ "node": ">=8" } }, - "node-syscall/node_modules/ms": { + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "optional": true, + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "optional": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "optional": true }, - "node-syscall/node_modules/node-gyp": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.1.0.tgz", - "integrity": "sha512-o2elh1qt7YUp3lkMwY3/l4KF3j/A3fI/Qt4NH+CQQgPJdqGE9y7qnP84cjIWN27Q0jJkrSAhCVDg+wBVNBYdBg==", + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-gyp": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", + "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", "optional": true, "dependencies": { "env-paths": "^2.2.0", "glob": "^7.1.4", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^8.0.14", + "make-fetch-happen": "^9.1.0", "nopt": "^5.0.0", - "npmlog": "^4.1.2", + "npmlog": "^6.0.0", "rimraf": "^3.0.2", "semver": "^7.3.5", - "tar": "^6.1.0", + "tar": "^6.1.2", "which": "^2.0.2" }, "bin": { @@ -712,7 +656,7 @@ "node": ">= 10.12.0" } }, - "node-syscall/node_modules/nopt": { + "node_modules/nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", @@ -727,46 +671,32 @@ "node": ">=6" } }, - "node-syscall/node_modules/npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "deprecated": "This package is no longer supported.", "optional": true, "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "node-syscall/node_modules/number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "optional": true, - "engines": { - "node": 
">=0.10.0" - } - }, - "node-syscall/node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "optional": true, + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node-syscall/node_modules/once": { + "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "optional": true, "dependencies": { "wrappy": "1" } }, - "node-syscall/node_modules/p-map": { + "node_modules/p-map": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", @@ -781,28 +711,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node-syscall/node_modules/path-is-absolute": { + "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "optional": true, "engines": { "node": ">=0.10.0" } }, - "node-syscall/node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "optional": true - }, - "node-syscall/node_modules/promise-inflight": { + "node_modules/promise-inflight": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha1-mEcocL8igTL8vdhoEputEsPAKeM=", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", "optional": true }, - "node-syscall/node_modules/promise-retry": { + "node_modules/promise-retry": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", @@ -815,34 +739,34 @@ "node": ">=10" } }, - "node-syscall/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "optional": true, "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" } }, - "node-syscall/node_modules/retry": { + 
"node_modules/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", "optional": true, "engines": { "node": ">= 4" } }, - "node-syscall/node_modules/rimraf": { + "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "optional": true, "dependencies": { "glob": "^7.1.3" @@ -854,26 +778,37 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node-syscall/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], "optional": true }, - "node-syscall/node_modules/safer-buffer": { + "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "optional": true }, - "node-syscall/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "optional": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -881,57 +816,63 @@ "node": ">=10" } }, - "node-syscall/node_modules/set-blocking": { + "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", "optional": true }, - "node-syscall/node_modules/signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "optional": true }, - "node-syscall/node_modules/smart-buffer": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.1.0.tgz", - "integrity": 
"sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw==", + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", "optional": true, "engines": { "node": ">= 6.0.0", "npm": ">= 3.0.0" } }, - "node-syscall/node_modules/socks": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.1.tgz", - "integrity": "sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA==", + "node_modules/socks": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", + "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", "optional": true, "dependencies": { - "ip": "^1.1.5", - "smart-buffer": "^4.1.0" + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" }, "engines": { - "node": ">= 10.13.0", + "node": ">= 10.0.0", "npm": ">= 3.0.0" } }, - "node-syscall/node_modules/socks-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-5.0.1.tgz", - "integrity": "sha512-vZdmnjb9a2Tz6WEQVIurybSwElwPxMZaIc7PzqbJTrezcKNznv6giT7J7tZDZ1BojVaa1jvO/UiUdhDVB0ACoQ==", + "node_modules/socks-proxy-agent": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz", + "integrity": "sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==", "optional": true, "dependencies": { "agent-base": "^6.0.2", - "debug": "4", - "socks": "^2.3.3" + "debug": "^4.3.3", + "socks": "^2.6.2" }, "engines": { - "node": ">= 6" + "node": ">= 10" } }, - "node-syscall/node_modules/ssri": { + "node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "optional": true + }, + "node_modules/ssri": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", @@ -943,42 +884,72 @@ "node": ">= 8" } }, - "node-syscall/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "optional": true, "dependencies": { - "safe-buffer": "~5.1.0" + "safe-buffer": "~5.2.0" } }, - "node-syscall/node_modules/string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "optional": true, "dependencies": { - "code-point-at": 
"^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node-syscall/node_modules/strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "optional": true, "dependencies": { - "ansi-regex": "^2.0.0" + "ansi-regex": "^5.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node-syscall/node_modules/unique-filename": { + "node_modules/syscall": { + "resolved": "node-syscall", + "link": true + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "optional": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/unique-filename": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", @@ -987,7 +958,7 @@ "unique-slug": "^2.0.0" } }, - "node-syscall/node_modules/unique-slug": { + "node_modules/unique-slug": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", @@ -996,13 +967,13 @@ "imurmurhash": "^0.1.4" } }, - "node-syscall/node_modules/util-deprecate": { + "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "optional": true }, - "node-syscall/node_modules/which": { + "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", @@ -1017,841 +988,35 @@ "node": ">= 8" } }, - "node-syscall/node_modules/wide-align": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", "optional": true, 
"dependencies": { - "string-width": "^1.0.2 || 2" + "string-width": "^1.0.2 || 2 || 3 || 4" } }, - "node-syscall/node_modules/wrappy": { + "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "optional": true - } - }, - "dependencies": { - "buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" - }, - "chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "optional": true - }, - "commander": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.13.0.tgz", - "integrity": "sha512-MVuS359B+YzaWqjCL/c+22gfryv+mCBPHAv3zyVI2GN8EY6IRP8VwtasXn8jyyhvvq84R4ImN1OKRtcbIasjYA==", - "dev": true - }, - "fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "optional": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.5.tgz", - "integrity": "sha512-+8NzxD82XQoNKNrl1d/FSi+X8wAEWR+sbYAfIvub4Nz0d22plFG72CEVVaufV8PNf4qSslFTD8VMOxNVhHCjTw==", - "optional": true, - "requires": { - "yallist": "^4.0.0" - } }, - "minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "optional": true, - "requires": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - } - }, - "mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "optional": true }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - }, - "source-map-support": { - "version": "0.5.19", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", - "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", - "requires": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "syscall": { - "version": "file:node-syscall", + "node-syscall": { + "name": "syscall", + "hasInstallScript": true, + "license": "BSD-2-Clause", "optional": true, - "requires": { - "node-gyp": "^8.1.0" - }, "dependencies": { - "@npmcli/move-file": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", - "integrity": 
"sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", - "optional": true, - "requires": { - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2" - } - }, - "@tootallnate/once": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", - "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", - "optional": true - }, - "abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "optional": true - }, - "agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "optional": true, - "requires": { - "debug": "4" - } - }, - "agentkeepalive": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.1.4.tgz", - "integrity": "sha512-+V/rGa3EuU74H6wR04plBb7Ks10FbtUQgRj/FQOG7uUIEuaINI+AiqJR1k6t3SVNs7o7ZjIdus6706qqzVq8jQ==", - "optional": true, - "requires": { - "debug": "^4.1.0", - "depd": "^1.1.2", - "humanize-ms": "^1.2.1" - } - }, - "aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "optional": true, - "requires": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - } - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "optional": true - }, - "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", - "optional": true - }, - "are-we-there-yet": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", - "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", - "optional": true, - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" - } - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "optional": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "optional": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "cacache": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz", - "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==", - "optional": true, - "requires": { - "@npmcli/move-file": "^1.0.1", - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": 
"^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.2", - "mkdirp": "^1.0.3", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.0.2", - "unique-filename": "^1.1.1" - } - }, - "clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "optional": true - }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "optional": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "optional": true - }, - "console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "optional": true - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "optional": true - }, - "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", - "optional": true, - "requires": { - "ms": "2.1.2" - } - }, - "delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", - "optional": true - }, - "depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", - "optional": true - }, - "encoding": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", - "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "optional": true, - "requires": { - "iconv-lite": "^0.6.2" - } - }, - "env-paths": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", - "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", - "optional": true - }, - "err-code": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "optional": true - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "optional": true - }, - "gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", - "optional": true, - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": 
"sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "optional": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "graceful-fs": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==", - "optional": true - }, - "has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", - "optional": true - }, - "http-cache-semantics": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", - "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==", - "optional": true - }, - "http-proxy-agent": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", - "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", - "optional": true, - "requires": { - "@tootallnate/once": "1", - "agent-base": "6", - "debug": "4" - } - }, - "https-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", - "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", - "optional": true, - "requires": { - "agent-base": "6", - "debug": "4" - } - }, - "humanize-ms": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0=", - "optional": true, - "requires": { - "ms": "^2.0.0" - } - }, - "iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "optional": true, - "requires": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - } - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "optional": true - }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "optional": true - }, - "infer-owner": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", - "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", - "optional": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "optional": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "optional": true - }, - "ip": { - "version": "1.1.5", - "resolved": 
"https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", - "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", - "optional": true - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "optional": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "is-lambda": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU=", - "optional": true - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "optional": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "optional": true - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "optional": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "make-fetch-happen": { - "version": "8.0.14", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz", - "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==", - "optional": true, - "requires": { - "agentkeepalive": "^4.1.3", - "cacache": "^15.0.5", - "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^4.0.1", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^6.0.0", - "minipass": "^3.1.3", - "minipass-collect": "^1.0.2", - "minipass-fetch": "^1.3.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^5.0.0", - "ssri": "^8.0.0" - } - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "optional": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minipass-collect": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", - "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", - "optional": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass-fetch": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", - "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", - "optional": true, - "requires": { - "encoding": "^0.1.12", - "minipass": "^3.1.0", - "minipass-sized": "^1.0.3", - "minizlib": "^2.0.0" - } - }, - "minipass-flush": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", - "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "optional": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass-pipeline": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", - "integrity": 
"sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "optional": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "minipass-sized": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", - "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "optional": true, - "requires": { - "minipass": "^3.0.0" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "optional": true - }, - "node-gyp": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.1.0.tgz", - "integrity": "sha512-o2elh1qt7YUp3lkMwY3/l4KF3j/A3fI/Qt4NH+CQQgPJdqGE9y7qnP84cjIWN27Q0jJkrSAhCVDg+wBVNBYdBg==", - "optional": true, - "requires": { - "env-paths": "^2.2.0", - "glob": "^7.1.4", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^8.0.14", - "nopt": "^5.0.0", - "npmlog": "^4.1.2", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.0", - "which": "^2.0.2" - } - }, - "nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "optional": true, - "requires": { - "abbrev": "1" - } - }, - "npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", - "optional": true, - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "optional": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "optional": true - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "optional": true, - "requires": { - "wrappy": "1" - } - }, - "p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "optional": true, - "requires": { - "aggregate-error": "^3.0.0" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "optional": true - }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "optional": true - }, - "promise-inflight": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha1-mEcocL8igTL8vdhoEputEsPAKeM=", - "optional": true - }, - "promise-retry": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", - "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "optional": true, - "requires": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - } - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "optional": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=", - "optional": true - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "optional": true, - "requires": { - "glob": "^7.1.3" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "optional": true - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "optional": true - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "optional": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "optional": true - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", - "optional": true - }, - "smart-buffer": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.1.0.tgz", - "integrity": "sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw==", - "optional": true - }, - "socks": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.1.tgz", - "integrity": "sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA==", - "optional": true, - "requires": { - "ip": "^1.1.5", - "smart-buffer": "^4.1.0" - } - }, - "socks-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-5.0.1.tgz", - "integrity": "sha512-vZdmnjb9a2Tz6WEQVIurybSwElwPxMZaIc7PzqbJTrezcKNznv6giT7J7tZDZ1BojVaa1jvO/UiUdhDVB0ACoQ==", - "optional": true, - "requires": { - "agent-base": "^6.0.2", - "debug": "4", - "socks": "^2.3.3" - } - }, - "ssri": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", - "integrity": 
"sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", - "optional": true, - "requires": { - "minipass": "^3.1.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "optional": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "optional": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "optional": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "unique-filename": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", - "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", - "optional": true, - "requires": { - "unique-slug": "^2.0.0" - } - }, - "unique-slug": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", - "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", - "optional": true, - "requires": { - "imurmurhash": "^0.1.4" - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", - "optional": true - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "optional": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "wide-align": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", - "optional": true, - "requires": { - "string-width": "^1.0.2 || 2" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "optional": true - } - } - }, - "tar": { - "version": "6.1.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", - "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", - "optional": true, - "requires": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^3.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - } - }, - "uglify-es": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.3.9.tgz", - "integrity": "sha512-r+MU0rfv4L/0eeW3xZrd16t4NZfK8Ld4SWVglYBb7ez5uXFWHuVRs6xCTrf1yirs9a4j4Y27nn7SRfO6v67XsQ==", - "dev": true, - "requires": { - "commander": "~2.13.0", - "source-map": "~0.6.1" + "node-gyp": "^8.1.0" } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "optional": true } } } diff --git a/package.json b/package.json index f276a4eb1..ec8add087 100644 --- a/package.json +++ b/package.json @@ -1,12 +1,6 @@ { "name": "gopherjs", "license": "BSD-2-Clause", - "devDependencies": { - "uglify-es": "^3.3.9" - }, - "dependencies": { - "source-map-support": "^0.5.19" - }, "optionalDependencies": { "syscall": "file:./node-syscall" } diff --git a/tests/gorepo/gorepo_test.go b/tests/gorepo/gorepo_test.go index d80715948..5ab93e9af 100644 --- a/tests/gorepo/gorepo_test.go +++ b/tests/gorepo/gorepo_test.go @@ -21,15 +21,6 @@ func TestGoRepositoryCompilerTests(t *testing.T) { args = append(args, "-v") } - shards := os.Getenv("CIRCLE_NODE_TOTAL") - shard := os.Getenv("CIRCLE_NODE_INDEX") - if shards != "" && shard != "" { - // We are running under CircleCI parallel test job, so we need to shard execution. - args = append(args, "-shard="+shard, "-shards="+shards) - // CircleCI reports a lot more cores than we can actually use, so we have to limit concurrency. - args = append(args, "-n=2", "-l=2") - } - cmd := exec.Command(args[0], args[1:]...) cmd.Stdout = os.Stdout cmd.Stderr = os.Stdout diff --git a/tests/gorepo/run.go b/tests/gorepo/run.go index d58968ada..6720f50d7 100644 --- a/tests/gorepo/run.go +++ b/tests/gorepo/run.go @@ -22,6 +22,7 @@ import ( "errors" "flag" "fmt" + "go/build/constraint" "hash/fnv" "io" "log" @@ -109,9 +110,6 @@ var knownFails = map[string]failReason{ "fixedbugs/issue23188.go": {desc: "incorrect order of evaluation of index operations"}, "fixedbugs/issue24547.go": {desc: "incorrect computing method sets with shadowed methods"}, - // These are new tests in Go 1.11.5 - "fixedbugs/issue28688.go": {category: notApplicable, desc: "testing runtime optimisations"}, - // These are new tests in Go 1.12. "fixedbugs/issue23837.go": {desc: "missing panic on nil pointer-to-empty-struct dereference"}, "fixedbugs/issue27201.go": {desc: "incorrect stack trace for nil dereference in inlined function"}, @@ -121,7 +119,6 @@ var knownFails = map[string]failReason{ // These are new tests in Go 1.12.9. "fixedbugs/issue30977.go": {category: neverTerminates, desc: "does for { runtime.GC() }"}, "fixedbugs/issue32477.go": {category: notApplicable, desc: "uses runtime.SetFinalizer and runtime.GC"}, - "fixedbugs/issue32680.go": {category: notApplicable, desc: "uses -gcflags=-d=ssa/check/on flag"}, // These are new tests in Go 1.13-1.16. 
"fixedbugs/issue19113.go": {category: lowLevelRuntimeDifference, desc: "JavaScript bit shifts by negative amount don't cause an exception"}, @@ -134,7 +131,6 @@ var knownFails = map[string]failReason{ "fixedbugs/issue30116u.go": {desc: "GopherJS doesn't specify the array/slice index selector in the out-of-bounds message"}, "fixedbugs/issue34395.go": {category: neverTerminates, desc: "https://github.com/gopherjs/gopherjs/issues/1007"}, "fixedbugs/issue35027.go": {category: usesUnsupportedPackage, desc: "uses unsupported conversion to reflect.SliceHeader and -gcflags=-d=checkptr"}, - "fixedbugs/issue35073.go": {category: usesUnsupportedPackage, desc: "uses unsupported flag -gcflags=-d=checkptr"}, "fixedbugs/issue35576.go": {category: lowLevelRuntimeDifference, desc: "GopherJS print/println format for floats differs from Go's"}, "fixedbugs/issue40917.go": {category: notApplicable, desc: "uses pointer arithmetic and unsupported flag -gcflags=-d=checkptr"}, @@ -149,17 +145,22 @@ var knownFails = map[string]failReason{ "fixedbugs/issue50854.go": {category: lowLevelRuntimeDifference, desc: "negative int32 overflow behaves differently in JS"}, // These are new tests in Go 1.18 - "fixedbugs/issue46938.go": {category: notApplicable, desc: "tests -d=checkptr compiler mode, which GopherJS doesn't support"}, - "fixedbugs/issue47928.go": {category: notApplicable, desc: "//go:nointerface is a part of GOEXPERIMENT=fieldtrack and is not supported by GopherJS"}, - "fixedbugs/issue48898.go": {category: other, desc: "https://github.com/gopherjs/gopherjs/issues/1128"}, - "fixedbugs/issue48536.go": {category: usesUnsupportedPackage, desc: "https://github.com/gopherjs/gopherjs/issues/1130"}, - "fixedbugs/issue53600.go": {category: lowLevelRuntimeDifference, desc: "GopherJS println format is different from Go's"}, + "fixedbugs/issue47928.go": {category: notApplicable, desc: "//go:nointerface is a part of GOEXPERIMENT=fieldtrack and is not supported by GopherJS"}, + "fixedbugs/issue48536.go": {category: usesUnsupportedPackage, desc: "https://github.com/gopherjs/gopherjs/issues/1130"}, + "fixedbugs/issue48898.go": {category: other, desc: "https://github.com/gopherjs/gopherjs/issues/1128"}, + "fixedbugs/issue53600.go": {category: lowLevelRuntimeDifference, desc: "GopherJS println format is different from Go's"}, + "typeparam/chans.go": {category: neverTerminates, desc: "uses runtime.SetFinalizer() and runtime.GC()."}, + "typeparam/issue51733.go": {category: usesUnsupportedPackage, desc: "unsafe: uintptr to struct pointer conversion is unsupported"}, + "typeparam/typeswitch5.go": {category: lowLevelRuntimeDifference, desc: "GopherJS println format is different from Go's"}, + + // Failures related to the lack of generics support. Ideally, this section + // should be emptied once https://github.com/gopherjs/gopherjs/issues/1013 is + // fixed. 
+ "typeparam/nested.go": {category: usesUnsupportedGenerics, desc: "incomplete support for generic types inside generic functions"}, // These are new tests in Go 1.19 - "fixedbugs/issue50672.go": {category: usesUnsupportedGenerics, desc: "Checking function nesting with one function having a type parameter."}, - "fixedbugs/issue53137.go": {category: usesUnsupportedGenerics, desc: "Checking setting type parameter of struct in parameter of a generic function."}, - "fixedbugs/issue53309.go": {category: usesUnsupportedGenerics, desc: "Checking unused type parameter in method call to interface"}, - "fixedbugs/issue53635.go": {category: usesUnsupportedGenerics, desc: "Checking switch type against nil type with unsupported type parameters"}, + "typeparam/issue51521.go": {category: lowLevelRuntimeDifference, desc: "different panic message when calling a method on nil interface"}, + "fixedbugs/issue50672.go": {category: other, desc: "https://github.com/gopherjs/gopherjs/issues/1271"}, "fixedbugs/issue53653.go": {category: lowLevelRuntimeDifference, desc: "GopherJS println format of int64 is different from Go's"}, } @@ -202,7 +203,7 @@ var ( // dirs are the directories to look for *.go files in. // TODO(bradfitz): just use all directories? - dirs = []string{".", "ken", "chan", "interface", "syntax", "dwarf", "fixedbugs"} + dirs = []string{".", "ken", "chan", "interface", "syntax", "dwarf", "fixedbugs", "typeparam"} // ratec controls the max number of tests running at a time. ratec chan bool @@ -366,6 +367,7 @@ func goFiles(dir string) []string { f, err := os.Open(dir) check(err) dirnames, err := f.Readdirnames(-1) + f.Close() check(err) names := []string{} for _, name := range dirnames { @@ -520,36 +522,19 @@ func shouldTest(src string, goos, goarch string) (ok bool, whyNot string) { } for _, line := range strings.Split(src, "\n") { - line = strings.TrimSpace(line) - if strings.HasPrefix(line, "//") { - line = line[2:] - } else { - continue - } - line = strings.TrimSpace(line) - if len(line) == 0 || line[0] != '+' { - continue + if strings.HasPrefix(line, "package ") { + break } - ctxt := &context{ - GOOS: goos, - GOARCH: goarch, - } - words := strings.Fields(line) - if words[0] == "+build" { - ok := false - for _, word := range words[1:] { - if ctxt.match(word) { - ok = true - break - } + if expr, err := constraint.Parse(line); err == nil { + ctxt := &context{ + GOOS: goos, + GOARCH: goarch, } - if !ok { - // no matching tag found. + if !expr.Eval(ctxt.match) { return false, line } } } - // no build tags return true, "" } @@ -557,16 +542,6 @@ func (ctxt *context) match(name string) bool { if name == "" { return false } - if i := strings.Index(name, ","); i >= 0 { - // comma-separated list - return ctxt.match(name[:i]) && ctxt.match(name[i+1:]) - } - if strings.HasPrefix(name, "!!") { // bad syntax, reject always - return false - } - if strings.HasPrefix(name, "!") { // negation - return len(name) > 1 && !ctxt.match(name[1:]) - } // Tags must be letters, digits, underscores or dots. // Unlike in Go identifiers, all digits are fine (e.g., "386"). @@ -576,10 +551,18 @@ func (ctxt *context) match(name string) bool { } } + // GOPHERJS: Ignore "goexperiment." for now + // GOPHERJS: Don't match "cgo" since not supported + // GOPHERJS: Don't match "gc" if name == ctxt.GOOS || name == ctxt.GOARCH { return true } + // GOPHERJS: Don't match "gcflags_noopt" + if name == "test_run" { + return true + } + return false } @@ -611,26 +594,23 @@ func (t *test) run() { } // Execution recipe stops at first blank line. 
- pos := strings.Index(t.src, "\n\n") - if pos == -1 { - t.err = errors.New("double newline not found") + action, _, ok := strings.Cut(t.src, "\n\n") + if !ok { + t.err = fmt.Errorf("double newline ending execution recipe not found in %s", t.goFileName()) return } - action := t.src[:pos] - if nl := strings.Index(action, "\n"); nl >= 0 && strings.Contains(action[:nl], "+build") { + if firstLine, rest, ok := strings.Cut(action, "\n"); ok && strings.Contains(firstLine, "+build") { // skip first line - action = action[nl+1:] - } - if strings.HasPrefix(action, "//") { - action = action[2:] + action = rest } + action = strings.TrimPrefix(action, "//") // Check for build constraints only up to the actual code. - pkgPos := strings.Index(t.src, "\npackage") - if pkgPos == -1 { - pkgPos = pos // some files are intentionally malformed + header, _, ok := strings.Cut(t.src, "\npackage") + if !ok { + header = action // some files are intentionally malformed } - if ok, why := shouldTest(t.src[:pkgPos], goos, goarch); !ok { + if ok, why := shouldTest(header, goos, goarch); !ok { t.action = "skip" if *showSkips { fmt.Printf("%-20s %-20s: %s\n", t.action, t.goFileName(), why) @@ -640,16 +620,20 @@ func (t *test) run() { var args, flags []string wantError := false - f := strings.Fields(action) + f, err := splitQuoted(action) + if err != nil { + t.err = fmt.Errorf("invalid test recipe: %v", err) + return + } if len(f) > 0 { action = f[0] args = f[1:] } - // GOPHERJS: For now, only run with "run", "cmpout" actions, in "fixedbugs" dir. Skip all others. + // GOPHERJS: For now, only run with "run", "cmpout" actions, in "fixedbugs" and "typeparam" dirs. Skip all others. switch action { case "run", "cmpout": - if filepath.Clean(t.dir) != "fixedbugs" { + if d := filepath.Clean(t.dir); d != "fixedbugs" && d != "typeparam" { action = "skip" } default: @@ -699,6 +683,19 @@ func (t *test) run() { os.Setenv("GOARCH", goarch) } + { + // GopherJS: we don't support any of -gcflags, but for the most part they + // are not too relevant to the outcome of the test. + supportedArgs := []string{} + for _, a := range args { + if strings.HasPrefix(a, "-gcflags") { + continue + } + supportedArgs = append(supportedArgs, a) + } + args = supportedArgs + } + useTmp := true runcmd := func(args ...string) ([]byte, error) { cmd := exec.Command(args[0], args[1:]...) @@ -1263,3 +1260,65 @@ func getenv(key, def string) string { } return def } + +// splitQuoted splits the string s around each instance of one or more consecutive +// white space characters while taking into account quotes and escaping, and +// returns an array of substrings of s or an empty list if s contains only white space. +// Single quotes and double quotes are recognized to prevent splitting within the +// quoted region, and are removed from the resulting substrings. If a quote in s +// isn't closed err will be set and r will have the unclosed argument as the +// last element. The backslash is used for escaping. 
+// +// For example, the following string: +// +// a b:"c d" 'e''f' "g\"" +// +// Would be parsed as: +// +// []string{"a", "b:c d", "ef", `g"`} +// +// [copied from src/go/build/build.go] +func splitQuoted(s string) (r []string, err error) { + var args []string + arg := make([]rune, len(s)) + escaped := false + quoted := false + quote := '\x00' + i := 0 + for _, rune := range s { + switch { + case escaped: + escaped = false + case rune == '\\': + escaped = true + continue + case quote != '\x00': + if rune == quote { + quote = '\x00' + continue + } + case rune == '"' || rune == '\'': + quoted = true + quote = rune + continue + case unicode.IsSpace(rune): + if quoted || i > 0 { + quoted = false + args = append(args, string(arg[:i])) + i = 0 + } + continue + } + arg[i] = rune + i++ + } + if quoted || i > 0 { + args = append(args, string(arg[:i])) + } + if quote != 0 { + err = errors.New("unclosed quote") + } else if escaped { + err = errors.New("unfinished escaping") + } + return args, err +} diff --git a/tests/js_test.go b/tests/js_test.go index 2ce43865f..6f6eaa542 100644 --- a/tests/js_test.go +++ b/tests/js_test.go @@ -829,6 +829,45 @@ func TestExternalize(t *testing.T) { } } +func TestInternalizeSlice(t *testing.T) { + tests := []struct { + name string + init []int + want string + }{ + { + name: `nil slice`, + init: []int(nil), + want: `[]int(nil)`, + }, + { + name: `empty slice`, + init: []int{}, + want: `[]int{}`, + }, + { + name: `non-empty slice`, + init: []int{42, 53, 64}, + want: `[]int{42, 53, 64}`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b := struct { + *js.Object + V []int `js:"V"` // V is externalized + }{Object: js.Global.Get("Object").New()} + b.V = tt.init + + result := fmt.Sprintf(`%#v`, b.V) // internalize b.V + if result != tt.want { + t.Errorf(`Unexpected result %q != %q`, result, tt.want) + } + }) + } +} + func TestInternalizeExternalizeNull(t *testing.T) { type S struct { *js.Object diff --git a/tests/map_js_test.go b/tests/map_js_test.go index 64cc8e6f0..c815661ab 100644 --- a/tests/map_js_test.go +++ b/tests/map_js_test.go @@ -69,7 +69,7 @@ func Test_MapStructObjectWrapper(t *testing.T) { stringMap := map[string]string{"key": "value"} - // You cannot wrap a map directly, so put it in a stuct. + // You cannot wrap a map directly, so put it in a struct. type StructWithMap struct { Map map[string]string } diff --git a/tests/misc_test.go b/tests/misc_test.go index a38d91c81..8dc3be924 100644 --- a/tests/misc_test.go +++ b/tests/misc_test.go @@ -900,7 +900,7 @@ func TestReflectSetForEmbed(t *testing.T) { f0 := e.Field(0) e.Set(in) if e.Field(0) != f0 { - t.Fatalf("relfect.Set got %v, want %v", f0, e.Field(0)) + t.Fatalf("reflect.Set got %v, want %v", f0, e.Field(0)) } } diff --git a/tests/numeric_test.go b/tests/numeric_test.go index df06db584..484968f41 100644 --- a/tests/numeric_test.go +++ b/tests/numeric_test.go @@ -1,10 +1,14 @@ package tests import ( + "fmt" + "math/bits" "math/rand" "runtime" "testing" "testing/quick" + + "github.com/gopherjs/gopherjs/js" ) // naiveMul64 performs 64-bit multiplication without using the multiplication @@ -93,3 +97,107 @@ func BenchmarkMul64(b *testing.B) { } }) } + +func TestIssue733(t *testing.T) { + if runtime.GOOS != "js" { + t.Skip("test uses GopherJS-specific features") + } + + t.Run("sign", func(t *testing.T) { + f := float64(-1) + i := uint32(f) + underlying := js.InternalObject(i).Float() // Get the raw JS number behind i. 
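+ // The negative value is expected to wrap around: uint32(-1) should be stored as 2^32-1 = 4294967295 in the underlying JS number, not as -1.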
+ if want := float64(4294967295); underlying != want { + t.Errorf("Got: uint32(float64(%v)) = %v. Want: %v.", f, underlying, want) + } + }) + t.Run("truncation", func(t *testing.T) { + f := float64(300) + i := uint8(f) + underlying := js.InternalObject(i).Float() // Get the raw JS number behind i. + if want := float64(44); underlying != want { + t.Errorf("Got: uint8(float64(%v)) = %v. Want: %v.", f, underlying, want) + } + }) +} + +// Test_32BitEnvironment tests that GopherJS behaves correctly +// as a 32-bit environment for integers. To simulate a 32-bit environment +// we have to use `$imul` instead of `*` to get the correct result. +func Test_32BitEnvironment(t *testing.T) { + if bits.UintSize != 32 { + t.Skip(`test is only relevant for 32-bit environment`) + } + + tests := []struct { + x, y, exp uint64 + }{ + { + x: 65535, // x = 2^16 - 1 + y: 65535, // same as x + exp: 4294836225, // x² works since it doesn't overflow 32 bits. + }, + { + x: 134217729, // x = 2^27 + 1, x < 2^32 and x > sqrt(2^53), so x² overflows 53 bits. + y: 134217729, // same as x + exp: 268435457, // x² mod 2^32 = (2^27 + 1)² mod 2^32 = (2^54 + 2^28 + 1) mod 2^32 = 2^28 + 1 + // In pure JS, `x * x >>> 0` would result in 268,435,456 because it lost the least significant bit + // prior to being truncated, whereas in a real 32-bit environment it would be 268,435,457, since + // the rollover removed the most significant bit and doesn't affect the least significant bit. + }, + { + x: 4294967295, // x = 2^32 - 1, another case where x² overflows 53 bits causing a loss of precision. + y: 4294967295, // same as x + exp: 1, // x² mod 2^32 = (2^32 - 1)² mod 2^32 = (2^64 - 2^33 + 1) mod 2^32 = 1 + // In pure JS, `x * x >>> 0` would result in 0 because it lost the least significant bits. + }, + { + x: 4294967295, // x = 2^32 - 1 + y: 3221225473, // y = 2^31 + 2^30 + 1 + exp: 1073741823, // 2^30 - 1. + // In pure JS, `x * y >>> 0` would result in 1,073,741,824. + }, + { + x: 4294967295, // x = 2^32 - 1 + y: 134217729, // y = 2^27 + 1 + exp: 4160749567, // In pure JS, `x * y >>> 0` would result in 4,160,749,568.
+ }, + } + + for i, test := range tests { + t.Run(fmt.Sprintf(`#%d/uint32`, i), func(t *testing.T) { + x, y, exp := uint32(test.x), uint32(test.y), uint32(test.exp) + if got := x * y; got != exp { + t.Errorf("got: %d\nwant: %d.", got, exp) + } + }) + + t.Run(fmt.Sprintf(`#%d/uintptr`, i), func(t *testing.T) { + x, y, exp := uintptr(test.x), uintptr(test.y), uintptr(test.exp) + if got := x * y; got != exp { + t.Errorf("got: %d\nwant: %d.", got, exp) + } + }) + + t.Run(fmt.Sprintf(`#%d/uint`, i), func(t *testing.T) { + x, y, exp := uint(test.x), uint(test.y), uint(test.exp) + if got := x * y; got != exp { + t.Errorf("got: %d\nwant: %d.", got, exp) + } + }) + + t.Run(fmt.Sprintf(`#%d/int32`, i), func(t *testing.T) { + x, y, exp := int32(test.x), int32(test.y), int32(test.exp) + if got := x * y; got != exp { + t.Errorf("got: %d\nwant: %d.", got, exp) + } + }) + + t.Run(fmt.Sprintf(`#%d/int`, i), func(t *testing.T) { + x, y, exp := int(test.x), int(test.y), int(test.exp) + if got := x * y; got != exp { + t.Errorf("got: %d\nwant: %d.", got, exp) + } + }) + } +} diff --git a/tool.go b/tool.go index 06483e96b..46d6a6edc 100644 --- a/tool.go +++ b/tool.go @@ -4,10 +4,8 @@ import ( "bytes" "errors" "fmt" - "go/ast" "go/build" "go/scanner" - "go/token" "go/types" "io" "net" @@ -28,8 +26,8 @@ import ( gbuild "github.com/gopherjs/gopherjs/build" "github.com/gopherjs/gopherjs/build/cache" "github.com/gopherjs/gopherjs/compiler" + "github.com/gopherjs/gopherjs/internal/errorList" "github.com/gopherjs/gopherjs/internal/sysutil" - "github.com/gopherjs/gopherjs/internal/testmain" "github.com/neelance/sourcemap" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" @@ -147,7 +145,7 @@ func main() { if err != nil { return err } - archive, err := s.BuildPackage(pkg) + archive, err := s.BuildProject(pkg) if err != nil { return err } @@ -214,8 +212,7 @@ func main() { if err != nil { return err } - - archive, err := s.BuildPackage(pkg) + archive, err := s.BuildProject(pkg) if err != nil { return err } @@ -371,27 +368,8 @@ func main() { return err } - _, err = s.BuildPackage(pkg.TestPackage()) - if err != nil { - return err - } - _, err = s.BuildPackage(pkg.XTestPackage()) - if err != nil { - return err - } - - fset := token.NewFileSet() - tests := testmain.TestMain{Package: pkg} - tests.Scan(fset) - mainPkg, mainFile, err := tests.Synthesize(fset) - if err != nil { - return fmt.Errorf("failed to generate testmain package for %s: %w", pkg.ImportPath, err) - } - importContext := &compiler.ImportContext{ - Packages: s.Types, - Import: s.ImportResolverFor(mainPkg), - } - mainPkgArchive, err := compiler.Compile(mainPkg.ImportPath, []*ast.File{mainFile}, fset, importContext, options.Minify) + pkg.IsTest = true + mainPkgArchive, err := s.BuildProject(pkg) if err != nil { return fmt.Errorf("failed to compile testmain package for %s: %w", pkg.ImportPath, err) } @@ -664,7 +642,7 @@ func (fs serveCommandFileSystem) Open(requestName string) (http.File, error) { buf := new(bytes.Buffer) browserErrors := new(bytes.Buffer) err := func() error { - archive, err := s.BuildPackage(pkg) + archive, err := s.BuildProject(pkg) if err != nil { return err } @@ -673,7 +651,7 @@ func (fs serveCommandFileSystem) Open(requestName string) (http.File, error) { m := &sourcemap.Map{File: base + ".js"} sourceMapFilter.MappingCallback = s.SourceMappingCallback(m) - deps, err := compiler.ImportDependencies(archive, s.BuildImportPath) + deps, err := compiler.ImportDependencies(archive, s.ImportResolverFor("")) if err != nil { return err } @@ 
-789,7 +767,7 @@ func handleError(err error, options *gbuild.Options, browserErrors *bytes.Buffer switch err := err.(type) { case nil: return 0 - case compiler.ErrorList: + case errorList.ErrorList: for _, entry := range err { printError(entry, options, browserErrors) } @@ -838,13 +816,7 @@ func sprintError(err error) string { func runNode(script string, args []string, dir string, quiet bool, out io.Writer) error { var allArgs []string if b, _ := strconv.ParseBool(os.Getenv("SOURCE_MAP_SUPPORT")); os.Getenv("SOURCE_MAP_SUPPORT") == "" || b { - allArgs = []string{"--require", "source-map-support/register"} - if err := exec.Command("node", "--require", "source-map-support/register", "--eval", "").Run(); err != nil { - if !quiet { - fmt.Fprintln(os.Stderr, "gopherjs: Source maps disabled. Install source-map-support module for nice stack traces. See https://github.com/gopherjs/gopherjs#gopherjs-run-gopherjs-test.") - } - allArgs = []string{} - } + allArgs = []string{"--enable-source-maps"} } if runtime.GOOS != "windows" {