Skip to content
Snippets Groups Projects
Commit e146d3ea authored by Matthew Dempsky's avatar Matthew Dempsky
Browse files

cmd/compile: switch to final unified IR export format

Now that there's a native go/types importer for unified IR, the
compiler no longer needs to stay backwards compatible with old iexport
importers.

This CL also updates the go/types and go/internal/gcimporter tests to
expect that the unified IR importer sets the receiver parameter type
to the underlying Interface type, rather than the Named type. This is
a temporary workaround until we make a decision on #49906.

Notably, this makes `GOEXPERIMENT=unified go test` work on generics
code without requiring `-vet=off` (because previously cmd/vet was
relying on unified IR's backwards-compatible iexport data, which
omitted generic types).

Change-Id: Iac7a2346bb7a91e6690fb2978fb702fadae5559d
Reviewed-on: https://go-review.googlesource.com/c/go/+/386004


Trust: Matthew Dempsky <mdempsky@google.com>
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Robert Griesemer <gri@golang.org>
Reviewed-by: Robert Findley <rfindley@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
parent deaec2ec
No related branches found
No related tags found
No related merge requests found
...@@ -115,10 +115,14 @@ func TestImportTestdata(t *testing.T) { ...@@ -115,10 +115,14 @@ func TestImportTestdata(t *testing.T) {
} }
testfiles := map[string][]string{ testfiles := map[string][]string{
"exports.go": {"go/ast", "go/token"}, "exports.go": {"go/ast", "go/token"},
"generics.go": nil,
} }
if !goexperiment.Unified { if goexperiment.Unified {
testfiles["generics.go"] = nil // TODO(mdempsky): Fix test below to flatten the transitive
// Package.Imports graph. Unified IR is more precise about
// recreating the package import graph.
testfiles["exports.go"] = []string{"go/ast"}
} }
for testfile, wantImports := range testfiles { for testfile, wantImports := range testfiles {
...@@ -326,6 +330,14 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types2.Named, level int) { ...@@ -326,6 +330,14 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types2.Named, level int) {
return // not an interface return // not an interface
} }
// The unified IR importer always sets interface method receiver
// parameters to point to the Interface type, rather than the Named.
// See #49906.
var want types2.Type = named
if goexperiment.Unified {
want = iface
}
// check explicitly declared methods // check explicitly declared methods
for i := 0; i < iface.NumExplicitMethods(); i++ { for i := 0; i < iface.NumExplicitMethods(); i++ {
m := iface.ExplicitMethod(i) m := iface.ExplicitMethod(i)
...@@ -334,7 +346,7 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types2.Named, level int) { ...@@ -334,7 +346,7 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types2.Named, level int) {
t.Errorf("%s: missing receiver type", m) t.Errorf("%s: missing receiver type", m)
continue continue
} }
if recv.Type() != named { if recv.Type() != want {
t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named) t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named)
} }
} }
......
...@@ -14,52 +14,22 @@ import ( ...@@ -14,52 +14,22 @@ import (
"cmd/internal/bio" "cmd/internal/bio"
) )
// writeNewExportFunc is a hook that can be added to append extra
// export data after the normal export data section. It allows
// experimenting with new export data format designs without requiring
// immediate support in the go/internal or x/tools importers.
var writeNewExportFunc func(out io.Writer)
func WriteExports(out *bio.Writer) { func WriteExports(out *bio.Writer) {
// When unified IR exports are enable, we simply append it to the var data bytes.Buffer
// end of the normal export data (with compiler extensions
// disabled), and write an extra header giving its size.
//
// If the compiler sees this header, it knows to read the new data
// instead; meanwhile the go/types importers will silently ignore it
// and continue processing the old export instead.
//
// This allows us to experiment with changes to the new export data
// format without needing to update the go/internal/gcimporter or
// (worse) x/tools/go/gcexportdata.
useNewExport := writeNewExportFunc != nil
var old, new bytes.Buffer
typecheck.WriteExports(&old, !useNewExport)
if useNewExport {
writeNewExportFunc(&new)
}
oldLen := old.Len()
newLen := new.Len()
if useNewExport { if base.Debug.Unified != 0 {
fmt.Fprintf(out, "\nnewexportsize %v\n", newLen) data.WriteByte('u')
writeUnifiedExport(&data)
} else {
typecheck.WriteExports(&data, true)
} }
// The linker also looks for the $$ marker - use char after $$ to distinguish format. // The linker also looks for the $$ marker - use char after $$ to distinguish format.
out.WriteString("\n$$B\n") // indicate binary export format out.WriteString("\n$$B\n") // indicate binary export format
io.Copy(out, &old) io.Copy(out, &data)
out.WriteString("\n$$\n") out.WriteString("\n$$\n")
io.Copy(out, &new)
if base.Debug.Export != 0 { if base.Debug.Export != 0 {
fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, oldLen) fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, data.Len())
if useNewExport {
fmt.Printf("BenchmarkNewExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, newLen)
}
} }
} }
...@@ -8,10 +8,10 @@ import ( ...@@ -8,10 +8,10 @@ import (
"errors" "errors"
"fmt" "fmt"
"internal/buildcfg" "internal/buildcfg"
"internal/pkgbits"
"os" "os"
pathpkg "path" pathpkg "path"
"runtime" "runtime"
"strconv"
"strings" "strings"
"unicode" "unicode"
"unicode/utf8" "unicode/utf8"
...@@ -28,22 +28,6 @@ import ( ...@@ -28,22 +28,6 @@ import (
"cmd/internal/objabi" "cmd/internal/objabi"
) )
// haveLegacyImports records whether we've imported any packages
// without a new export data section. This is useful for experimenting
// with new export data format designs, when you need to support
// existing tests that manually compile files with inconsistent
// compiler flags.
var haveLegacyImports = false
// newReadImportFunc is an extension hook for experimenting with new
// export data formats. If a new export data payload was written out
// for an imported package by overloading writeNewExportFunc, then
// that payload will be mapped into memory and passed to
// newReadImportFunc.
var newReadImportFunc = func(data string, pkg1 *types.Pkg, env *types2.Context, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
panic("unexpected new export data payload")
}
type gcimports struct { type gcimports struct {
ctxt *types2.Context ctxt *types2.Context
packages map[string]*types2.Package packages map[string]*types2.Package
...@@ -220,7 +204,7 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag ...@@ -220,7 +204,7 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag
} }
defer f.Close() defer f.Close()
r, end, newsize, err := findExportData(f) r, end, err := findExportData(f)
if err != nil { if err != nil {
return return
} }
...@@ -229,41 +213,40 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag ...@@ -229,41 +213,40 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag
fmt.Printf("importing %s (%s)\n", path, f.Name()) fmt.Printf("importing %s (%s)\n", path, f.Name())
} }
if newsize != 0 { c, err := r.ReadByte()
// We have unified IR data. Map it, and feed to the importers. if err != nil {
end -= newsize return
var data string }
data, err = base.MapFile(r.File(), end, newsize)
if err != nil {
return
}
pkg2, err = newReadImportFunc(data, pkg1, env, packages) pos := r.Offset()
} else {
// We only have old data. Oh well, fall back to the legacy importers.
haveLegacyImports = true
var c byte // Map export data section into memory as a single large
switch c, err = r.ReadByte(); { // string. This reduces heap fragmentation and allows returning
case err != nil: // individual substrings very efficiently.
return var data string
data, err = base.MapFile(r.File(), pos, end-pos)
if err != nil {
return
}
case c != 'i': switch c {
// Indexed format is distinguished by an 'i' byte, case 'u':
// whereas previous export formats started with 'c', 'd', or 'v'. if !buildcfg.Experiment.Unified {
err = fmt.Errorf("unexpected package format byte: %v", c) base.Fatalf("unexpected export data format")
return
} }
pos := r.Offset() // TODO(mdempsky): This seems a bit clunky.
data = strings.TrimSuffix(data, "\n$$\n")
// Map string (and data) section into memory as a single large pr := pkgbits.NewPkgDecoder(pkg1.Path, data)
// string. This reduces heap fragmentation and allows
// returning individual substrings very efficiently. // Read package descriptors for both types2 and compiler backend.
var data string readPackage(newPkgReader(pr), pkg1)
data, err = base.MapFile(r.File(), pos, end-pos) pkg2 = importer.ReadPackage(env, packages, pr)
if err != nil {
return case 'i':
if buildcfg.Experiment.Unified {
base.Fatalf("unexpected export data format")
} }
typecheck.ReadImports(pkg1, data) typecheck.ReadImports(pkg1, data)
...@@ -274,6 +257,12 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag ...@@ -274,6 +257,12 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag
return return
} }
} }
default:
// Indexed format is distinguished by an 'i' byte,
// whereas previous export formats started with 'c', 'd', or 'v'.
err = fmt.Errorf("unexpected package format byte: %v", c)
return
} }
err = addFingerprint(path, f, end) err = addFingerprint(path, f, end)
...@@ -283,7 +272,7 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag ...@@ -283,7 +272,7 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag
// findExportData returns a *bio.Reader positioned at the start of the // findExportData returns a *bio.Reader positioned at the start of the
// binary export data section, and a file offset for where to stop // binary export data section, and a file offset for where to stop
// reading. // reading.
func findExportData(f *os.File) (r *bio.Reader, end, newsize int64, err error) { func findExportData(f *os.File) (r *bio.Reader, end int64, err error) {
r = bio.NewReader(f) r = bio.NewReader(f)
// check object header // check object header
...@@ -326,14 +315,6 @@ func findExportData(f *os.File) (r *bio.Reader, end, newsize int64, err error) { ...@@ -326,14 +315,6 @@ func findExportData(f *os.File) (r *bio.Reader, end, newsize int64, err error) {
// process header lines // process header lines
for !strings.HasPrefix(line, "$$") { for !strings.HasPrefix(line, "$$") {
if strings.HasPrefix(line, "newexportsize ") {
fields := strings.Fields(line)
newsize, err = strconv.ParseInt(fields[1], 10, 64)
if err != nil {
return
}
}
line, err = r.ReadString('\n') line, err = r.ReadString('\n')
if err != nil { if err != nil {
return return
......
...@@ -589,10 +589,6 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node ...@@ -589,10 +589,6 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
if pri, ok := objReader[sym]; ok { if pri, ok := objReader[sym]; ok {
return pri.pr.objIdx(pri.idx, nil, explicits) return pri.pr.objIdx(pri.idx, nil, explicits)
} }
if haveLegacyImports {
assert(len(explicits) == 0)
return typecheck.Resolve(ir.NewIdent(src.NoXPos, sym))
}
base.Fatalf("unresolved stub: %v", sym) base.Fatalf("unresolved stub: %v", sym)
} }
...@@ -1972,12 +1968,6 @@ func InlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExp ...@@ -1972,12 +1968,6 @@ func InlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExp
pri, ok := bodyReader[fn] pri, ok := bodyReader[fn]
if !ok { if !ok {
// Assume it's an imported function or something that we don't
// have access to in quirks mode.
if haveLegacyImports {
return nil
}
base.FatalfAt(call.Pos(), "missing function body for call to %v", fn) base.FatalfAt(call.Pos(), "missing function body for call to %v", fn)
} }
......
...@@ -16,7 +16,6 @@ import ( ...@@ -16,7 +16,6 @@ import (
"sort" "sort"
"cmd/compile/internal/base" "cmd/compile/internal/base"
"cmd/compile/internal/importer"
"cmd/compile/internal/inline" "cmd/compile/internal/inline"
"cmd/compile/internal/ir" "cmd/compile/internal/ir"
"cmd/compile/internal/typecheck" "cmd/compile/internal/typecheck"
...@@ -74,17 +73,6 @@ var localPkgReader *pkgReader ...@@ -74,17 +73,6 @@ var localPkgReader *pkgReader
func unified(noders []*noder) { func unified(noders []*noder) {
inline.NewInline = InlineCall inline.NewInline = InlineCall
writeNewExportFunc = writeNewExport
newReadImportFunc = func(data string, pkg1 *types.Pkg, ctxt *types2.Context, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
pr := pkgbits.NewPkgDecoder(pkg1.Path, data)
// Read package descriptors for both types2 and compiler backend.
readPackage(newPkgReader(pr), pkg1)
pkg2 = importer.ReadPackage(ctxt, packages, pr)
return
}
data := writePkgStub(noders) data := writePkgStub(noders)
// We already passed base.Flag.Lang to types2 to handle validating // We already passed base.Flag.Lang to types2 to handle validating
...@@ -266,7 +254,7 @@ func readPackage(pr *pkgReader, importpkg *types.Pkg) { ...@@ -266,7 +254,7 @@ func readPackage(pr *pkgReader, importpkg *types.Pkg) {
} }
} }
func writeNewExport(out io.Writer) { func writeUnifiedExport(out io.Writer) {
l := linker{ l := linker{
pw: pkgbits.NewPkgEncoder(base.Debug.SyncFrames), pw: pkgbits.NewPkgEncoder(base.Debug.SyncFrames),
...@@ -332,5 +320,5 @@ func writeNewExport(out io.Writer) { ...@@ -332,5 +320,5 @@ func writeNewExport(out io.Writer) {
w.Flush() w.Flush()
} }
l.pw.DumpTo(out) base.Ctxt.Fingerprint = l.pw.DumpTo(out)
} }
...@@ -125,10 +125,14 @@ func TestImportTestdata(t *testing.T) { ...@@ -125,10 +125,14 @@ func TestImportTestdata(t *testing.T) {
} }
testfiles := map[string][]string{ testfiles := map[string][]string{
"exports.go": {"go/ast", "go/token"}, "exports.go": {"go/ast", "go/token"},
"generics.go": nil,
} }
if !goexperiment.Unified { if goexperiment.Unified {
testfiles["generics.go"] = nil // TODO(mdempsky): Fix test below to flatten the transitive
// Package.Imports graph. Unified IR is more precise about
// recreating the package import graph.
testfiles["exports.go"] = []string{"go/ast"}
} }
for testfile, wantImports := range testfiles { for testfile, wantImports := range testfiles {
...@@ -153,11 +157,6 @@ func TestImportTestdata(t *testing.T) { ...@@ -153,11 +157,6 @@ func TestImportTestdata(t *testing.T) {
} }
func TestImportTypeparamTests(t *testing.T) { func TestImportTypeparamTests(t *testing.T) {
// This test doesn't yet work with the unified export format.
if goexperiment.Unified {
t.Skip("unified export data format is currently unsupported")
}
// This package only handles gc export data. // This package only handles gc export data.
if runtime.Compiler != "gc" { if runtime.Compiler != "gc" {
t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
...@@ -460,6 +459,14 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) { ...@@ -460,6 +459,14 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) {
return // not an interface return // not an interface
} }
// The unified IR importer always sets interface method receiver
// parameters to point to the Interface type, rather than the Named.
// See #49906.
var want types.Type = named
if goexperiment.Unified {
want = iface
}
// check explicitly declared methods // check explicitly declared methods
for i := 0; i < iface.NumExplicitMethods(); i++ { for i := 0; i < iface.NumExplicitMethods(); i++ {
m := iface.ExplicitMethod(i) m := iface.ExplicitMethod(i)
...@@ -468,8 +475,8 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) { ...@@ -468,8 +475,8 @@ func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) {
t.Errorf("%s: missing receiver type", m) t.Errorf("%s: missing receiver type", m)
continue continue
} }
if recv.Type() != named { if recv.Type() != want {
t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named) t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), want)
} }
} }
......
...@@ -12,6 +12,7 @@ import ( ...@@ -12,6 +12,7 @@ import (
"go/importer" "go/importer"
"go/parser" "go/parser"
"go/token" "go/token"
"internal/goexperiment"
"internal/testenv" "internal/testenv"
"strings" "strings"
"testing" "testing"
...@@ -208,7 +209,7 @@ func TestCheckExpr(t *testing.T) { ...@@ -208,7 +209,7 @@ func TestCheckExpr(t *testing.T) {
// expr is an identifier or selector expression that is passed // expr is an identifier or selector expression that is passed
// to CheckExpr at the position of the comment, and object is // to CheckExpr at the position of the comment, and object is
// the string form of the object it denotes. // the string form of the object it denotes.
const src = ` src := `
package p package p
import "fmt" import "fmt"
...@@ -235,6 +236,13 @@ func f(a int, s string) S { ...@@ -235,6 +236,13 @@ func f(a int, s string) S {
return S{} return S{}
}` }`
// The unified IR importer always sets interface method receiver
// parameters to point to the Interface type, rather than the Named.
// See #49906.
if goexperiment.Unified {
src = strings.ReplaceAll(src, "func (fmt.Stringer).", "func (interface).")
}
fset := token.NewFileSet() fset := token.NewFileSet()
f, err := parser.ParseFile(fset, "p", src, parser.ParseComments) f, err := parser.ParseFile(fset, "p", src, parser.ParseComments)
if err != nil { if err != nil {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment