From adef4deeb85ede59201f37f5145763ed55a807f7 Mon Sep 17 00:00:00 2001
From: David Chase
Date: Fri, 7 Aug 2020 22:46:43 -0400
Subject: cmd/compile: enable late expansion for interface calls

Includes a few tweaks to Value.copyOf(a) (make it a no-op for a
self-copy) and new pattern hack "___" (3 underscores) is like ellipsis,
except the replacement doesn't need to have matching
ellipsis/underscores.

Moved the arg-length check in generated pattern-matching code BEFORE
the args are probed, because not all instances of variable length OpFoo
will have all the args mentioned in some rule for OpFoo, and when that
happens, the compiler panics without the early check.

Change-Id: I66de40672b3794a6427890ff96c805a488d783f4
Reviewed-on: https://go-review.googlesource.com/c/go/+/247537
Trust: David Chase
Run-TryBot: David Chase
TryBot-Result: Go Bot
Reviewed-by: Cherry Zhang
---
 src/cmd/compile/internal/ssa/rewritegeneric.go | 62 ++++++++++++++++++++++++--
 1 file changed, 58 insertions(+), 4 deletions(-)

(limited to 'src/cmd/compile/internal/ssa/rewritegeneric.go')

diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 925ff53fd1..ade0a69a10 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -124,6 +124,8 @@ func rewriteValuegeneric(v *Value) bool {
 		return rewriteValuegeneric_OpIMake(v)
 	case OpInterCall:
 		return rewriteValuegeneric_OpInterCall(v)
+	case OpInterLECall:
+		return rewriteValuegeneric_OpInterLECall(v)
 	case OpIsInBounds:
 		return rewriteValuegeneric_OpIsInBounds(v)
 	case OpIsNonNil:
 		return rewriteValuegeneric_OpIsNonNil(v)
@@ -8522,6 +8524,46 @@ func rewriteValuegeneric_OpInterCall(v *Value) bool {
 	}
 	return false
 }
+func rewriteValuegeneric_OpInterLECall(v *Value) bool {
+	// match: (InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___)
+	// cond: devirtLESym(v, auxCall, itab, off) != nil
+	// result: devirtLECall(v, devirtLESym(v, auxCall, itab, off))
+	for {
+		if len(v.Args) < 1 {
+			break
+		}
+		auxCall := auxToCall(v.Aux)
+		v_0 := v.Args[0]
+		if v_0.Op != OpLoad {
+			break
+		}
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpOffPtr {
+			break
+		}
+		off := auxIntToInt64(v_0_0.AuxInt)
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpITab {
+			break
+		}
+		v_0_0_0_0 := v_0_0_0.Args[0]
+		if v_0_0_0_0.Op != OpIMake {
+			break
+		}
+		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+		if v_0_0_0_0_0.Op != OpAddr {
+			break
+		}
+		itab := auxToSym(v_0_0_0_0_0.Aux)
+		v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+		if v_0_0_0_0_0_0.Op != OpSB || !(devirtLESym(v, auxCall, itab, off) != nil) {
+			break
+		}
+		v.copyOf(devirtLECall(v, devirtLESym(v, auxCall, itab, off)))
+		return true
+	}
+	return false
+}
 func rewriteValuegeneric_OpIsInBounds(v *Value) bool {
 	v_1 := v.Args[1]
 	v_0 := v.Args[0]
@@ -18549,6 +18591,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 	// match: (Phi (Const8 [c]) (Const8 [c]))
 	// result: (Const8 [c])
 	for {
+		if len(v.Args) != 2 {
+			break
+		}
 		_ = v.Args[1]
 		v_0 := v.Args[0]
 		if v_0.Op != OpConst8 {
@@ -18556,7 +18601,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 		}
 		c := auxIntToInt8(v_0.AuxInt)
 		v_1 := v.Args[1]
-		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c || len(v.Args) != 2 {
+		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c {
 			break
 		}
 		v.reset(OpConst8)
@@ -18566,6 +18611,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 	// match: (Phi (Const16 [c]) (Const16 [c]))
 	// result: (Const16 [c])
 	for {
+		if len(v.Args) != 2 {
+			break
+		}
 		_ = v.Args[1]
 		v_0 := v.Args[0]
 		if v_0.Op != OpConst16 {
@@ -18573,7 +18621,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 		}
 		c := auxIntToInt16(v_0.AuxInt)
 		v_1 := v.Args[1]
-		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c || len(v.Args) != 2 {
+		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c {
 			break
 		}
 		v.reset(OpConst16)
@@ -18583,6 +18631,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 	// match: (Phi (Const32 [c]) (Const32 [c]))
 	// result: (Const32 [c])
 	for {
+		if len(v.Args) != 2 {
+			break
+		}
 		_ = v.Args[1]
 		v_0 := v.Args[0]
 		if v_0.Op != OpConst32 {
@@ -18590,7 +18641,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 		}
 		c := auxIntToInt32(v_0.AuxInt)
 		v_1 := v.Args[1]
-		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c || len(v.Args) != 2 {
+		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c {
 			break
 		}
 		v.reset(OpConst32)
@@ -18600,6 +18651,9 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 	// match: (Phi (Const64 [c]) (Const64 [c]))
 	// result: (Const64 [c])
 	for {
+		if len(v.Args) != 2 {
+			break
+		}
 		_ = v.Args[1]
 		v_0 := v.Args[0]
 		if v_0.Op != OpConst64 {
@@ -18607,7 +18661,7 @@ func rewriteValuegeneric_OpPhi(v *Value) bool {
 		}
 		c := auxIntToInt64(v_0.AuxInt)
 		v_1 := v.Args[1]
-		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || len(v.Args) != 2 {
+		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c {
 			break
 		}
 		v.reset(OpConst64)
-- 
cgit v1.2.1
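Illustrative note (not part of the commit above): the match/cond/result comments in the generated matcher record the rule it was generated from, and the trailing "___" in the InterLECall pattern is the new "any remaining args" marker the commit message describes. The sketch below is a minimal standalone Go program, using a toy value type rather than the real ssa.Value, showing why the generated code now checks len(v.Args) before probing any argument:

	package main

	import "fmt"

	// value is a toy stand-in for ssa.Value, just enough to show the
	// matcher shape (illustrative only; not the compiler's type).
	type value struct {
		op   string
		args []*value
	}

	// matchLoadFirst mirrors the generated matcher for a variable-arity
	// op: the arg-length check runs BEFORE v.args[0] is probed. Because
	// "___" tolerates extra trailing args, the check is "< 1", not "!= 1".
	func matchLoadFirst(v *value) bool {
		if len(v.args) < 1 { // early check; indexing below would otherwise panic
			return false
		}
		return v.args[0].op == "Load"
	}

	func main() {
		bare := &value{op: "InterLECall"} // instance with none of the rule's args
		fmt.Println(matchLoadFirst(bare)) // false, no panic

		call := &value{op: "InterLECall", args: []*value{{op: "Load"}, {op: "Mem"}}}
		fmt.Println(matchLoadFirst(call)) // true; the extra arg is allowed under "___"
	}

Without the early check, an InterLECall instance that carries fewer args than the rule mentions would panic inside the matcher instead of simply failing to match.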
"removed nil check") + // result: (Invalid) + for { + if v_0.Op != OpOffPtr { + break + } + v_0_0 := v_0.Args[0] + if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 { + break + } + call := v_0_0.Args[0] + if call.Op != OpStaticLECall || len(call.Args) != 2 || v_1.Op != OpSelectN || auxIntToInt64(v_1.AuxInt) != 1 || call != v_1.Args[0] || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) { + break + } + v.reset(OpInvalid) + return true + } return false } func rewriteValuegeneric_OpNot(v *Value) bool { @@ -20669,6 +20703,70 @@ func rewriteValuegeneric_OpSelect1(v *Value) bool { } return false } +func rewriteValuegeneric_OpSelectN(v *Value) bool { + v_0 := v.Args[0] + b := v.Block + config := b.Func.Config + // match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem)) + // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call) + // result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem) + for { + if auxIntToInt64(v.AuxInt) != 0 { + break + } + call := v_0 + if call.Op != OpStaticLECall || len(call.Args) != 4 { + break + } + sym := auxToCall(call.Aux) + mem := call.Args[3] + dst := call.Args[0] + src := call.Args[1] + call_2 := call.Args[2] + if call_2.Op != OpConst64 { + break + } + sz := auxIntToInt64(call_2.AuxInt) + if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) { + break + } + v.reset(OpMove) + v.AuxInt = int64ToAuxInt(int64(sz)) + v.Aux = typeToAux(dst.Type.Elem()) + v.AddArg3(dst, src, mem) + return true + } + // match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem)) + // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call) + // result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem) + for { + if auxIntToInt64(v.AuxInt) != 0 { + break + } + call := v_0 + if call.Op != OpStaticLECall || len(call.Args) != 4 { + break + } + sym := auxToCall(call.Aux) + mem := call.Args[3] + dst := call.Args[0] + src := call.Args[1] + call_2 := call.Args[2] + if call_2.Op != OpConst32 { + break + } + sz := auxIntToInt32(call_2.AuxInt) + if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) { + break + } + v.reset(OpMove) + v.AuxInt = int64ToAuxInt(int64(sz)) + v.Aux = typeToAux(dst.Type.Elem()) + v.AddArg3(dst, src, mem) + return true + } + return false +} func rewriteValuegeneric_OpSignExt16to32(v *Value) bool { v_0 := v.Args[0] // match: (SignExt16to32 (Const16 [c])) @@ -21714,6 +21812,48 @@ func rewriteValuegeneric_OpStore(v *Value) bool { v.copyOf(mem) return true } + // match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call)) + // cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject") + // result: mem + for { + if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 { + break + } + call := v_0.Args[0] + if call.Op != OpStaticLECall || len(call.Args) != 2 { + break + } + x := v_1 + mem := v_2 + if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) { + break + } + v.copyOf(mem) + return true + } + // match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x 
+	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
+	// result: mem
+	for {
+		if v_0.Op != OpOffPtr {
+			break
+		}
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 {
+			break
+		}
+		x := v_1
+		mem := v_2
+		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
+			break
+		}
+		v.copyOf(mem)
+		return true
+	}
 	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
 	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
 	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
@@ -24411,6 +24551,24 @@ func rewriteValuegeneric_OpZero(v *Value) bool {
 		v.copyOf(mem)
 		return true
 	}
+	// match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
+	// cond: isSameCall(call.Aux, "runtime.newobject")
+	// result: mem
+	for {
+		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 {
+			break
+		}
+		mem := v_1
+		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
+			break
+		}
+		v.copyOf(mem)
+		return true
+	}
 	// match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
 	// cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
 	// result: (Zero {t1} [n] p1 mem)
-- 
cgit v1.2.1
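Illustrative note (not part of the commit above): the Zero and Store rules added here mirror, for the late-expanded (SelectN [n] call:(StaticLECall ...)) form, the existing rules that recognize runtime.newobject. Memory returned by runtime.newobject is already zeroed, so an explicit zeroing store into the fresh object is redundant and the rewrite yields just the call's memory result. A minimal Go sketch of the source-level shape involved (hypothetical example; whether this exact code reaches SSA in the matched form depends on earlier optimizations and the call-expansion schedule):

	package main

	import "fmt"

	type point struct{ x, y int64 }

	func main() {
		// "new(point)" lowers to a runtime.newobject call whose result is
		// already zeroed. In late-expanded form the pointer is
		// (SelectN [0] call) and the memory is (SelectN [1] call), so the
		// Zero/Store-of-zero rules can delete the explicit clear below.
		p := new(point)
		*p = point{} // redundant zeroing the (Zero (SelectN [0] ...)) rule targets
		fmt.Println(p.x, p.y)
	}

The new SelectN rules play the same role for runtime.memmove: a single-use memmove call with a non-negative constant size (Const64 or Const32) whose destination is inlinably movable becomes a plain Move, the late-expanded analogue of the existing memmove-to-Move rewrite. Likewise, the NilCheck rules extend nil-check elimination to pointers produced by a late-expanded runtime.newobject, which cannot be nil.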