Diffstat (limited to 'src/cmd/compile/internal/ssa/rewritegeneric.go')
-rw-r--r--  src/cmd/compile/internal/ssa/rewritegeneric.go | 51
1 file changed, 26 insertions(+), 25 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 180e48b34c..925ff53fd1 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -8479,11 +8479,12 @@ func rewriteValuegeneric_OpIMake(v *Value) bool {
func rewriteValuegeneric_OpInterCall(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (InterCall [argsize] (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) mem)
- // cond: devirt(v, itab, off) != nil
- // result: (StaticCall [int32(argsize)] {devirt(v, itab, off)} mem)
+ // match: (InterCall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) mem)
+ // cond: devirt(v, auxCall, itab, off) != nil
+ // result: (StaticCall [int32(argsize)] {devirt(v, auxCall, itab, off)} mem)
for {
- argsize := auxIntToInt64(v.AuxInt)
+ argsize := auxIntToInt32(v.AuxInt)
+ auxCall := auxToCall(v.Aux)
if v_0.Op != OpLoad {
break
}
@@ -8510,12 +8511,12 @@ func rewriteValuegeneric_OpInterCall(v *Value) bool {
break
}
mem := v_1
- if !(devirt(v, itab, off) != nil) {
+ if !(devirt(v, auxCall, itab, off) != nil) {
break
}
v.reset(OpStaticCall)
v.AuxInt = int32ToAuxInt(int32(argsize))
- v.Aux = symToAux(devirt(v, itab, off))
+ v.Aux = callToAux(devirt(v, auxCall, itab, off))
v.AddArg(mem)
return true
}
@@ -16022,7 +16023,7 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool {
return true
}
// match: (NilCheck (Load (OffPtr [c] (SP)) (StaticCall {sym} _)) _)
- // cond: symNamed(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
+ // cond: isSameCall(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
// result: (Invalid)
for {
if v_0.Op != OpLoad {
@@ -16042,15 +16043,15 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool {
if v_0_1.Op != OpStaticCall {
break
}
- sym := auxToSym(v_0_1.Aux)
- if !(symNamed(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+ sym := auxToCall(v_0_1.Aux)
+ if !(isSameCall(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
break
}
v.reset(OpInvalid)
return true
}
// match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) (StaticCall {sym} _))) _)
- // cond: symNamed(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
+ // cond: isSameCall(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
// result: (Invalid)
for {
if v_0.Op != OpOffPtr {
@@ -16074,8 +16075,8 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool {
if v_0_0_1.Op != OpStaticCall {
break
}
- sym := auxToSym(v_0_0_1.Aux)
- if !(symNamed(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+ sym := auxToCall(v_0_0_1.Aux)
+ if !(isSameCall(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
break
}
v.reset(OpInvalid)
@@ -21067,10 +21068,10 @@ func rewriteValuegeneric_OpStaticCall(v *Value) bool {
b := v.Block
config := b.Func.Config
// match: (StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem))))
- // cond: sz >= 0 && symNamed(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)
+ // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)
// result: (Move {t.Elem()} [int64(sz)] dst src mem)
for {
- sym := auxToSym(v.Aux)
+ sym := auxToCall(v.Aux)
s1 := v_0
if s1.Op != OpStore {
break
@@ -21094,7 +21095,7 @@ func rewriteValuegeneric_OpStaticCall(v *Value) bool {
t := auxToType(s3.Aux)
mem := s3.Args[2]
dst := s3.Args[1]
- if !(sz >= 0 && symNamed(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)) {
+ if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)) {
break
}
v.reset(OpMove)
@@ -21104,10 +21105,10 @@ func rewriteValuegeneric_OpStaticCall(v *Value) bool {
return true
}
// match: (StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem))))
- // cond: sz >= 0 && symNamed(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)
+ // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)
// result: (Move {t.Elem()} [int64(sz)] dst src mem)
for {
- sym := auxToSym(v.Aux)
+ sym := auxToCall(v.Aux)
s1 := v_0
if s1.Op != OpStore {
break
@@ -21131,7 +21132,7 @@ func rewriteValuegeneric_OpStaticCall(v *Value) bool {
t := auxToType(s3.Aux)
mem := s3.Args[2]
dst := s3.Args[1]
- if !(sz >= 0 && symNamed(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)) {
+ if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3)) {
break
}
v.reset(OpMove)
@@ -21144,7 +21145,7 @@ func rewriteValuegeneric_OpStaticCall(v *Value) bool {
// cond: needRaceCleanup(sym, v)
// result: x
for {
- sym := auxToSym(v.Aux)
+ sym := auxToCall(v.Aux)
x := v_0
if !(needRaceCleanup(sym, v)) {
break
@@ -21608,7 +21609,7 @@ func rewriteValuegeneric_OpStore(v *Value) bool {
return true
}
// match: (Store (Load (OffPtr [c] (SP)) mem) x mem)
- // cond: isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
+ // cond: isConstZero(x) && mem.Op == OpStaticCall && isSameCall(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
// result: mem
for {
if v_0.Op != OpLoad {
@@ -21625,14 +21626,14 @@ func rewriteValuegeneric_OpStore(v *Value) bool {
break
}
x := v_1
- if mem != v_2 || !(isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) {
+ if mem != v_2 || !(isConstZero(x) && mem.Op == OpStaticCall && isSameCall(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) {
break
}
v.copyOf(mem)
return true
}
// match: (Store (OffPtr (Load (OffPtr [c] (SP)) mem)) x mem)
- // cond: isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
+ // cond: isConstZero(x) && mem.Op == OpStaticCall && isSameCall(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
// result: mem
for {
if v_0.Op != OpOffPtr {
@@ -21653,7 +21654,7 @@ func rewriteValuegeneric_OpStore(v *Value) bool {
break
}
x := v_1
- if mem != v_2 || !(isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) {
+ if mem != v_2 || !(isConstZero(x) && mem.Op == OpStaticCall && isSameCall(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) {
break
}
v.copyOf(mem)
@@ -24337,7 +24338,7 @@ func rewriteValuegeneric_OpZero(v *Value) bool {
b := v.Block
config := b.Func.Config
// match: (Zero (Load (OffPtr [c] (SP)) mem) mem)
- // cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
+ // cond: mem.Op == OpStaticCall && isSameCall(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
// result: mem
for {
if v_0.Op != OpLoad {
@@ -24350,7 +24351,7 @@ func rewriteValuegeneric_OpZero(v *Value) bool {
}
c := auxIntToInt64(v_0_0.AuxInt)
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpSP || mem != v_1 || !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) {
+ if v_0_0_0.Op != OpSP || mem != v_1 || !(mem.Op == OpStaticCall && isSameCall(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) {
break
}
v.copyOf(mem)
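
Note: the rewritten rules above lean on call-aux helpers in src/cmd/compile/internal/ssa/rewrite.go. The following is a minimal sketch of their assumed shapes, for context only; the AuxCall type and its Fn field are taken as given from the surrounding package, and the exact upstream definitions may differ.

// Sketch (assumed shapes; see rewrite.go in the Go tree for the real code).

// auxToCall unwraps the *AuxCall stored in a call op's Aux field.
func auxToCall(i interface{}) *AuxCall {
	return i.(*AuxCall)
}

// callToAux wraps an *AuxCall for storing back into Aux, as the
// devirtualized StaticCall result in the InterCall rule does.
func callToAux(c *AuxCall) interface{} {
	return c
}

// isSameCall reports whether a call's AuxCall targets the named function,
// replacing the older symbol-based symNamed/isSameSym checks.
func isSameCall(aux interface{}, name string) bool {
	fn := aux.(*AuxCall).Fn
	return fn != nil && fn.String() == name
}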