From 60d3bcdec38eafbffe3086d8aea190ff8bcdece7 Mon Sep 17 00:00:00 2001
From: "khr@golang.org" <khr@golang.org>
Date: Thu, 8 May 2025 10:00:22 -0700
Subject: [PATCH] runtime: remove ptr/scalar bitmap metric

We don't use ptr/scalar bitmap spans (unrolled GC programs) any more,
so the metric tracking them will always be zero. They have been unused
since CL 616255.
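
The stat isn't exposed on its own; it only fed a few memory aggregates,
including the runtime/metrics value adjusted in the metrics.go hunk
below. A minimal sketch of reading such an aggregate, assuming the
/memory/classes/metadata/other:bytes key (the key name is not visible
in this CL's context lines):

	package main

	import (
		"fmt"
		"runtime/metrics"
	)

	func main() {
		// Assumed key: one of the aggregates that previously
		// included the ptr/scalar bitmap bytes and now does not.
		const key = "/memory/classes/metadata/other:bytes"
		samples := []metrics.Sample{{Name: key}}
		metrics.Read(samples)
		if samples[0].Value.Kind() == metrics.KindUint64 {
			fmt.Println(key, "=", samples[0].Value.Uint64())
		}
	}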

Update #73628

Change-Id: Ic179927a8bc24e6291876c218d88e8848b057c2a
Reviewed-on: https://go-review.googlesource.com/c/go/+/671096
Reviewed-by: Keith Randall <khr@google.com>
Reviewed-by: Michael Knyszek <mknyszek@google.com>
Auto-Submit: Keith Randall <khr@golang.org>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
---
 src/runtime/align_runtime_test.go |  1 -
 src/runtime/metrics.go            |  5 ++---
 src/runtime/mheap.go              | 11 +++--------
 src/runtime/mstats.go             | 25 +++++++++++--------------
 4 files changed, 16 insertions(+), 26 deletions(-)

diff --git a/src/runtime/align_runtime_test.go b/src/runtime/align_runtime_test.go
index 6d77e0d3d4a..4bcb49db2f5 100644
--- a/src/runtime/align_runtime_test.go
+++ b/src/runtime/align_runtime_test.go
@@ -28,7 +28,6 @@ var AtomicFields = []uintptr{
 	unsafe.Offsetof(heapStatsDelta{}.released),
 	unsafe.Offsetof(heapStatsDelta{}.inHeap),
 	unsafe.Offsetof(heapStatsDelta{}.inStacks),
-	unsafe.Offsetof(heapStatsDelta{}.inPtrScalarBits),
 	unsafe.Offsetof(heapStatsDelta{}.inWorkBufs),
 	unsafe.Offsetof(lfnode{}.next),
 	unsafe.Offsetof(mstats{}.last_gc_nanotime),
diff --git a/src/runtime/metrics.go b/src/runtime/metrics.go
index 949a2d42bd0..48da745521a 100644
--- a/src/runtime/metrics.go
+++ b/src/runtime/metrics.go
@@ -333,8 +333,7 @@ func initMetrics() {
 			compute: func(in *statAggregate, out *metricValue) {
 				out.kind = metricKindUint64
 				out.scalar = uint64(in.heapStats.committed - in.heapStats.inHeap -
-					in.heapStats.inStacks - in.heapStats.inWorkBufs -
-					in.heapStats.inPtrScalarBits)
+					in.heapStats.inStacks - in.heapStats.inWorkBufs)
 			},
 		},
 		"/memory/classes/heap/objects:bytes": {
@@ -397,7 +396,7 @@ func initMetrics() {
 			deps: makeStatDepSet(heapStatsDep, sysStatsDep),
 			compute: func(in *statAggregate, out *metricValue) {
 				out.kind = metricKindUint64
-				out.scalar = uint64(in.heapStats.inWorkBufs+in.heapStats.inPtrScalarBits) + in.sysStats.gcMiscSys
+				out.scalar = uint64(in.heapStats.inWorkBufs) + in.sysStats.gcMiscSys
 			},
 		},
 		"/memory/classes/os-stacks:bytes": {
diff --git a/src/runtime/mheap.go b/src/runtime/mheap.go
index dbad51dcbf0..5a27ab5e78f 100644
--- a/src/runtime/mheap.go
+++ b/src/runtime/mheap.go
@@ -979,10 +979,9 @@ func (h *mheap) reclaimChunk(arenas []arenaIdx, pageIdx, n uintptr) uintptr {
 type spanAllocType uint8
 
 const (
-	spanAllocHeap          spanAllocType = iota // heap span
-	spanAllocStack                              // stack span
-	spanAllocPtrScalarBits                      // unrolled GC prog bitmap span
-	spanAllocWorkBuf                            // work buf span
+	spanAllocHeap    spanAllocType = iota // heap span
+	spanAllocStack                        // stack span
+	spanAllocWorkBuf                      // work buf span
 )
 
 // manual returns true if the span allocation is manually managed.
@@ -1407,8 +1406,6 @@ HaveSpan:
 		atomic.Xaddint64(&stats.inHeap, int64(nbytes))
 	case spanAllocStack:
 		atomic.Xaddint64(&stats.inStacks, int64(nbytes))
-	case spanAllocPtrScalarBits:
-		atomic.Xaddint64(&stats.inPtrScalarBits, int64(nbytes))
 	case spanAllocWorkBuf:
 		atomic.Xaddint64(&stats.inWorkBufs, int64(nbytes))
 	}
@@ -1719,8 +1716,6 @@ func (h *mheap) freeSpanLocked(s *mspan, typ spanAllocType) {
 		atomic.Xaddint64(&stats.inHeap, -int64(nbytes))
 	case spanAllocStack:
 		atomic.Xaddint64(&stats.inStacks, -int64(nbytes))
-	case spanAllocPtrScalarBits:
-		atomic.Xaddint64(&stats.inPtrScalarBits, -int64(nbytes))
 	case spanAllocWorkBuf:
 		atomic.Xaddint64(&stats.inWorkBufs, -int64(nbytes))
 	}
diff --git a/src/runtime/mstats.go b/src/runtime/mstats.go
index 5507b873e5b..29ace5ec16f 100644
--- a/src/runtime/mstats.go
+++ b/src/runtime/mstats.go
@@ -442,12 +442,11 @@ func readmemstats_m(stats *MemStats) {
 
 	stackInUse := uint64(consStats.inStacks)
 	gcWorkBufInUse := uint64(consStats.inWorkBufs)
-	gcProgPtrScalarBitsInUse := uint64(consStats.inPtrScalarBits)
 
 	totalMapped := gcController.heapInUse.load() + gcController.heapFree.load() + gcController.heapReleased.load() +
 		memstats.stacks_sys.load() + memstats.mspan_sys.load() + memstats.mcache_sys.load() +
 		memstats.buckhash_sys.load() + memstats.gcMiscSys.load() + memstats.other_sys.load() +
-		stackInUse + gcWorkBufInUse + gcProgPtrScalarBitsInUse
+		stackInUse + gcWorkBufInUse
 
 	heapGoal := gcController.heapGoal()
 
@@ -461,7 +460,7 @@ func readmemstats_m(stats *MemStats) {
 		//
 		// * memstats.heapInUse == inHeap
 		// * memstats.heapReleased == released
-		// * memstats.heapInUse + memstats.heapFree == committed - inStacks - inWorkBufs - inPtrScalarBits
+		// * memstats.heapInUse + memstats.heapFree == committed - inStacks - inWorkBufs
 		// * memstats.totalAlloc == totalAlloc
 		// * memstats.totalFree == totalFree
 		//
@@ -482,7 +481,7 @@ func readmemstats_m(stats *MemStats) {
 			throw("heapReleased and consistent stats are not equal")
 		}
 		heapRetained := gcController.heapInUse.load() + gcController.heapFree.load()
-		consRetained := uint64(consStats.committed - consStats.inStacks - consStats.inWorkBufs - consStats.inPtrScalarBits)
+		consRetained := uint64(consStats.committed - consStats.inStacks - consStats.inWorkBufs)
 		if heapRetained != consRetained {
 			print("runtime: global value=", heapRetained, "\n")
 			print("runtime: consistent value=", consRetained, "\n")
@@ -533,8 +532,8 @@ func readmemstats_m(stats *MemStats) {
 	//
 	// or
 	//
-	// HeapSys = sys - stacks_inuse - gcWorkBufInUse - gcProgPtrScalarBitsInUse
-	// HeapIdle = sys - stacks_inuse - gcWorkBufInUse - gcProgPtrScalarBitsInUse - heapInUse
+	// HeapSys = sys - stacks_inuse - gcWorkBufInUse
+	// HeapIdle = sys - stacks_inuse - gcWorkBufInUse - heapInUse
 	//
 	// => HeapIdle = HeapSys - heapInUse = heapFree + heapReleased
 	stats.HeapIdle = gcController.heapFree.load() + gcController.heapReleased.load()
@@ -553,7 +552,7 @@ func readmemstats_m(stats *MemStats) {
 	// MemStats defines GCSys as an aggregate of all memory related
 	// to the memory management system, but we track this memory
 	// at a more granular level in the runtime.
-	stats.GCSys = memstats.gcMiscSys.load() + gcWorkBufInUse + gcProgPtrScalarBitsInUse
+	stats.GCSys = memstats.gcMiscSys.load() + gcWorkBufInUse
 	stats.OtherSys = memstats.other_sys.load()
 	stats.NextGC = heapGoal
 	stats.LastGC = memstats.last_gc_unix
@@ -678,12 +677,11 @@ func (s *sysMemStat) add(n int64) {
 // consistent with one another.
 type heapStatsDelta struct {
 	// Memory stats.
-	committed       int64 // byte delta of memory committed
-	released        int64 // byte delta of released memory generated
-	inHeap          int64 // byte delta of memory placed in the heap
-	inStacks        int64 // byte delta of memory reserved for stacks
-	inWorkBufs      int64 // byte delta of memory reserved for work bufs
-	inPtrScalarBits int64 // byte delta of memory reserved for unrolled GC prog bits
+	committed  int64 // byte delta of memory committed
+	released   int64 // byte delta of released memory generated
+	inHeap     int64 // byte delta of memory placed in the heap
+	inStacks   int64 // byte delta of memory reserved for stacks
+	inWorkBufs int64 // byte delta of memory reserved for work bufs
 
 	// Allocator stats.
 	//
@@ -709,7 +707,6 @@ func (a *heapStatsDelta) merge(b *heapStatsDelta) {
 	a.inHeap += b.inHeap
 	a.inStacks += b.inStacks
 	a.inWorkBufs += b.inWorkBufs
-	a.inPtrScalarBits += b.inPtrScalarBits
 
 	a.tinyAllocCount += b.tinyAllocCount
 	a.largeAlloc += b.largeAlloc
-- 
GitLab