author    Austin Clements <austin@google.com>    2018-04-17 16:33:59 -0400
committer Austin Clements <austin@google.com>    2018-05-22 20:44:01 +0000
commit    577c05ca1cba1555ee56f57065481a75aefe64ab (patch)
tree      6c1645fc3bcef86ab19936e5e3ac0dbc94ef131b /src
parent    51be90a251b2030da02079d28ce6dc50364a351e (diff)
download  go-git-577c05ca1cba1555ee56f57065481a75aefe64ab.tar.gz
cmd/compile: single pass over Blocks in Liveness.epilogue
Currently Liveness.epilogue makes three passes over the Blocks, but
there's no need to do this. Combine them into a single pass. This
eliminates the need for blockEffects.lastbitmapindex, but, more
importantly, will let us incrementally compact the liveness bitmaps and
significantly reduce allocations in Liveness.epilogue.

Passes toolstash -cmp.

Updates #24543.

Change-Id: I27802bcd00d23aa122a7ec16cdfd739ae12dd7aa
Reviewed-on: https://go-review.googlesource.com/110175
Run-TryBot: Austin Clements <austin@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
Reviewed-by: David Chase <drchase@google.com>
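The change hinges on one small piece of bookkeeping: at the top of the per-block loop, remember where that block's bitmaps begin in the shared lv.livevars slice (firstBitmapIndex in the patch below). The work that previously lived in separate passes, driven by the saved be.lastbitmapindex, can then run inside the same loop iteration. The following is a self-contained toy sketch of that pattern in Go; the names blocks, all, and first are invented for illustration and are not compiler code.

package main

import "fmt"

func main() {
    blocks := [][]int{{1, 2}, {3}, {4, 5, 6}}
    var all []int // plays the role of lv.livevars

    for _, b := range blocks {
        first := len(all) // plays the role of firstBitmapIndex
        for _, v := range b {
            all = append(all, v*v) // one entry appended per element, like one bitmap per safe point
        }
        // Revisit only this block's entries, walking forward from first,
        // replacing what used to be a separate pass over all blocks.
        for i := first; i < len(all); i++ {
            fmt.Printf("block entry %d: %d\n", i, all[i])
        }
    }
}

In the actual patch the "revisit" step is the register-liveness check at calls, which now walks forward from firstBitmapIndex instead of backward from the removed lastbitmapindex.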
Diffstat (limited to 'src')
-rw-r--r--    src/cmd/compile/internal/gc/plive.go    43
1 file changed, 17 insertions(+), 26 deletions(-)
diff --git a/src/cmd/compile/internal/gc/plive.go b/src/cmd/compile/internal/gc/plive.go
index 88f265de02..47bc1a98a5 100644
--- a/src/cmd/compile/internal/gc/plive.go
+++ b/src/cmd/compile/internal/gc/plive.go
@@ -80,8 +80,6 @@ import (
 
 // BlockEffects summarizes the liveness effects on an SSA block.
 type BlockEffects struct {
-    lastbitmapindex int // for Liveness.epilogue
-
     // Computed during Liveness.prologue using only the content of
     // individual blocks:
     //
@@ -988,6 +986,7 @@ func (lv *Liveness) epilogue() {
 
     for _, b := range lv.f.Blocks {
         be := lv.blockEffects(b)
+        firstBitmapIndex := len(lv.livevars)
 
         // Compute avarinitany and avarinitall for entry to block.
         // This duplicates information known during Liveness.solve
@@ -1039,14 +1038,8 @@ func (lv *Liveness) epilogue() {
             lv.livevars = append(lv.livevars, varRegVec{vars: live})
         }
 
-        be.lastbitmapindex = len(lv.livevars) - 1
-    }
-
-    for _, b := range lv.f.Blocks {
-        be := lv.blockEffects(b)
-
         // walk backward, construct maps at each safe point
-        index := int32(be.lastbitmapindex)
+        index := int32(len(lv.livevars) - 1)
         if index < 0 {
             // the first block we encounter should have the ATEXT so
             // at no point should pos ever be less than zero.
@@ -1089,6 +1082,21 @@ func (lv *Liveness) epilogue() {
             live := &lv.livevars[index]
             live.Or(*live, liveout)
         }
+
+        // Check that no registers are live across calls.
+        // For closure calls, the CALLclosure is the last use
+        // of the context register, so it's dead after the call.
+        index = int32(firstBitmapIndex)
+        for _, v := range b.Values {
+            if lv.issafepoint(v) {
+                live := lv.livevars[index]
+                if v.Op.IsCall() && live.regs != 0 {
+                    lv.printDebug()
+                    v.Fatalf("internal error: %v register %s recorded as live at call", lv.fn.Func.Nname, live.regs.niceString(lv.f.Config))
+                }
+                index++
+            }
+        }
     }
 
     // Useful sanity check: on entry to the function,
@@ -1107,23 +1115,6 @@ func (lv *Liveness) epilogue() {
         lv.printDebug()
         lv.f.Fatalf("internal error: %v register %s recorded as live on entry", lv.fn.Func.Nname, regs.niceString(lv.f.Config))
     }
-
-    // Check that no registers are live across calls.
-    // For closure calls, the CALLclosure is the last use
-    // of the context register, so it's dead after the call.
-    for _, b := range lv.f.Blocks {
-        index := int32(lv.blockEffects(b).lastbitmapindex)
-        for i := len(b.Values) - 1; i >= 0; i-- {
-            v := b.Values[i]
-            if lv.issafepoint(v) {
-                live := lv.livevars[index]
-                if v.Op.IsCall() && live.regs != 0 {
-                    lv.printDebug()
-                    v.Fatalf("internal error: %v register %s recorded as live at call", lv.fn.Func.Nname, live.regs.niceString(lv.f.Config))
-                }
-                index--
-            }
-        }
-    }
 }
 
 func (lv *Liveness) clobber() {