1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/ir"
10 "cmd/compile/internal/logopt"
11 "cmd/compile/internal/reflectdata"
12 "cmd/compile/internal/rttype"
13 "cmd/compile/internal/typecheck"
14 "cmd/compile/internal/types"
15 "cmd/internal/obj"
16 "cmd/internal/obj/s390x"
17 "cmd/internal/objabi"
18 "cmd/internal/src"
19 "encoding/binary"
20 "fmt"
21 "internal/buildcfg"
22 "io"
23 "math"
24 "math/bits"
25 "os"
26 "path/filepath"
27 "strings"
28 )
29
// deadValueChoice controls whether applyRewrite opportunistically
// eliminates values that become unused during rewriting.
type deadValueChoice bool

const (
	leaveDeadValues  deadValueChoice = false
	removeDeadValues                 = true

	// Size thresholds used by rewrite rules when deciding between
	// repeated-store style expansions and other strategies.
	// NOTE(review): presumably byte counts — confirm against the rules
	// files that reference these constants (not visible in this chunk).
	repZeroThreshold = 1408
	repMoveThreshold = 1408
)
39
40
// applyRewrite repeatedly applies the block rewriter rb and the value
// rewriter rv to f until a fixed point is reached (an iteration that
// changes nothing). If deadcode is removeDeadValues, values that become
// unused are invalidated along the way and freed at the end. Statement
// boundary marks (src.PosIsStmt) displaced from removed values are parked
// in pendingLines and re-homed on surviving values/blocks afterwards.
func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValueChoice) {
	// pendingLines holds positions whose is-statement mark lost its home
	// value and still needs a new carrier within the same block.
	pendingLines := f.cachedLineStarts
	pendingLines.clear()
	debug := f.pass.debug
	if debug > 1 {
		fmt.Printf("%s: rewriting for %s\n", f.pass.name, f.Name)
	}

	// Iteration budget: generous but finite, so a pathological rule set
	// is detected by the cycle check below instead of looping forever.
	itersLimit := f.NumBlocks()
	if itersLimit < 20 {
		itersLimit = 20
	}
	var iters int
	var states map[string]bool // hashes of function states already seen (cycle detection)
	for {
		if debug > 1 {
			fmt.Printf("%s: iter %d\n", f.pass.name, iters)
		}
		change := false
		deadChange := false
		for _, b := range f.Blocks {
			var b0 *Block
			if debug > 1 {
				// Shallow-copy the block so before/after can be printed.
				fmt.Printf("%s: start block\n", f.pass.name)
				b0 = new(Block)
				*b0 = *b
				b0.Succs = append([]Edge{}, b.Succs...)
			}
			// Short-circuit OpCopy chains feeding the block's control values.
			for i, c := range b.ControlValues() {
				for c.Op == OpCopy {
					c = c.Args[0]
					b.ReplaceControl(i, c)
				}
			}
			if rb(b) {
				change = true
				if debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", b0.LongString(), b.LongString())
				}
			}
			for j, v := range b.Values {
				if debug > 1 {
					fmt.Printf("%s: consider %v\n", f.pass.name, v.LongString())
				}
				var v0 *Value
				if debug > 1 {
					// Shallow-copy the value for before/after logging.
					v0 = new(Value)
					*v0 = *v
					v0.Args = append([]*Value{}, v.Args...)
				}
				if v.Uses == 0 && v.removeable() {
					if v.Op != OpInvalid && deadcode == removeDeadValues {
						// Reset a now-unused value so the use counts of its
						// arguments are decremented. This is not a full deadcode
						// pass (it does not handle cycles), but it helps
						// x.Uses==1 rule conditions fire.
						v.reset(OpInvalid)
						deadChange = true
					}
					// No point rewriting a value with no uses.
					continue
				}

				vchange := phielimValue(v)
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				// Eliminate OpCopy inputs. If a copy becomes unused as a
				// result, invalidate it and walk down its (now dead) argument
				// chain, so that phantom copy uses don't keep x.Uses
				// artificially high.
				for i, a := range v.Args {
					if a.Op != OpCopy {
						continue
					}
					aa := copySource(a)
					v.SetArg(i, aa)
					// If a (a copy) carried a statement boundary, try to find
					// a replacement carrier: first the copy's source aa (earlier
					// in the dataflow, so the better choice), then v itself;
					// otherwise park the position in pendingLines.
					if a.Pos.IsStmt() == src.PosIsStmt {
						if aa.Block == a.Block && aa.Pos.Line() == a.Pos.Line() && aa.Pos.IsStmt() != src.PosNotStmt {
							aa.Pos = aa.Pos.WithIsStmt()
						} else if v.Block == a.Block && v.Pos.Line() == a.Pos.Line() && v.Pos.IsStmt() != src.PosNotStmt {
							v.Pos = v.Pos.WithIsStmt()
						} else {
							// The cleanup loop at the end of this function will
							// attempt to re-home this boundary on another value
							// in the same block.
							pendingLines.set(a.Pos, int32(a.Block.ID))
						}
						a.Pos = a.Pos.WithNotStmt()
					}
					vchange = true
					for a.Uses == 0 {
						b := a.Args[0]
						a.reset(OpInvalid)
						a = b
					}
				}
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				// Apply the value rewrite function.
				if rv(v) {
					vchange = true
					// If v became a poor choice for a statement boundary,
					// move the boundary to a better value in the block.
					if v.Pos.IsStmt() == src.PosIsStmt {
						if k := nextGoodStatementIndex(v, j, b); k != j {
							v.Pos = v.Pos.WithNotStmt()
							b.Values[k].Pos = b.Values[k].Pos.WithIsStmt()
						}
					}
				}

				change = change || vchange
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}
			}
		}
		if !change && !deadChange {
			break
		}
		iters++
		if (iters > itersLimit || debug >= 2) && change {
			// Suspiciously many rewrites (or debug mode): start hashing the
			// function state each iteration and fail if a state repeats,
			// which means the rules are cycling.
			if states == nil {
				states = make(map[string]bool)
			}
			h := f.rewriteHash()
			if _, ok := states[h]; ok {
				// Found a cycle. To diagnose it, bump debug to 2 and restart
				// the detection, so every applied rule is printed until the
				// cycle recurs; if debug is already >= 2 we've done that, so
				// it's time to crash.
				if debug < 2 {
					debug = 2
					states = make(map[string]bool)
				} else {
					f.Fatalf("rewrite cycle detected")
				}
			}
			states[h] = true
		}
	}
	// Remove clobbered (OpInvalid) values, compacting each block's value
	// slice in place, and re-home any parked statement boundaries.
	for _, b := range f.Blocks {
		j := 0
		for i, v := range b.Values {
			vl := v.Pos
			if v.Op == OpInvalid {
				if v.Pos.IsStmt() == src.PosIsStmt {
					pendingLines.set(vl, int32(b.ID))
				}
				f.freeValue(v)
				continue
			}
			if v.Pos.IsStmt() != src.PosNotStmt && !notStmtBoundary(v.Op) {
				if pl, ok := pendingLines.get(vl); ok && pl == int32(b.ID) {
					pendingLines.remove(vl)
					v.Pos = v.Pos.WithIsStmt()
				}
			}
			if i != j {
				b.Values[j] = v
			}
			j++
		}
		// A boundary with no value carrier can land on the block itself.
		if pl, ok := pendingLines.get(b.Pos); ok && pl == int32(b.ID) {
			b.Pos = b.Pos.WithIsStmt()
			pendingLines.remove(b.Pos)
		}
		b.truncateValues(j)
	}
}
233
234
235
// is64BitFloat reports whether t is an 8-byte floating point type.
func is64BitFloat(t *types.Type) bool {
	return t.Size() == 8 && t.IsFloat()
}

// is32BitFloat reports whether t is a 4-byte floating point type.
func is32BitFloat(t *types.Type) bool {
	return t.Size() == 4 && t.IsFloat()
}

// is64BitInt reports whether t is an 8-byte integer type.
func is64BitInt(t *types.Type) bool {
	return t.Size() == 8 && t.IsInteger()
}

// is32BitInt reports whether t is a 4-byte integer type.
func is32BitInt(t *types.Type) bool {
	return t.Size() == 4 && t.IsInteger()
}

// is16BitInt reports whether t is a 2-byte integer type.
func is16BitInt(t *types.Type) bool {
	return t.Size() == 2 && t.IsInteger()
}

// is8BitInt reports whether t is a 1-byte integer type.
func is8BitInt(t *types.Type) bool {
	return t.Size() == 1 && t.IsInteger()
}

// isPtr reports whether t is a pointer-shaped type.
func isPtr(t *types.Type) bool {
	return t.IsPtrShaped()
}
263
// copyCompatibleType reports whether a value of type t1 can be copied
// to a location of type t2: the sizes must match, and integers are
// interchangeable with integers, pointers with pointers; anything else
// must compare equal.
func copyCompatibleType(t1, t2 *types.Type) bool {
	if t1.Size() != t2.Size() {
		return false
	}
	if t1.IsInteger() {
		return t2.IsInteger()
	}
	if isPtr(t1) {
		return isPtr(t2)
	}
	return t1.Compare(t2) == types.CMPeq
}
276
277
278
279 func mergeSym(x, y Sym) Sym {
280 if x == nil {
281 return y
282 }
283 if y == nil {
284 return x
285 }
286 panic(fmt.Sprintf("mergeSym with two non-nil syms %v %v", x, y))
287 }
288
289 func canMergeSym(x, y Sym) bool {
290 return x == nil || y == nil
291 }
292
293
294
295
296
// canMergeLoadClobber reports whether the load can be merged into target
// without invalidating the schedule. It additionally checks that the other,
// non-load argument x is something we are ok with clobbering.
func canMergeLoadClobber(target, load, x *Value) bool {
	// The register containing x is going to get clobbered.
	// Don't merge if we still need the value of x.
	// We don't have liveness information here, so we approximate
	// "x is dead after target" with the checks below; regalloc will
	// issue a reg-reg move to save x if we are wrong.
	switch {
	case x.Uses == 2 && x.Op == OpPhi && len(x.Args) == 2 && (x.Args[0] == target || x.Args[1] == target) && target.Uses == 1:
		// Loop-carried pattern:
		//   x = Phi(?, target)
		//   target = Op(load, x)
		// target is the only use of x besides the phi itself, so x is
		// effectively consumed by target on each iteration — treat it
		// as clobberable.
	case x.Uses > 1:
		// x is needed elsewhere; don't clobber it.
		return false
	}
	// Don't merge if target sits in a deeper loop than x — that would
	// move (or force re-materialization of) work into the loop.
	loopnest := x.Block.Func.loopnest()
	if loopnest.depth(target.Block.ID) > loopnest.depth(x.Block.ID) {
		return false
	}
	return canMergeLoad(target, load)
}
324
325
326
// canMergeLoad reports whether the load can be merged into target without
// invalidating the schedule.
func canMergeLoad(target, load *Value) bool {
	if target.Block.ID != load.Block.ID {
		// If the load is in a different block, do not merge it.
		return false
	}

	// We can't merge the load into the target if the load
	// has more than one use.
	if load.Uses != 1 {
		return false
	}

	mem := load.MemoryArg()

	// We need the load's memory arg to still be alive at target. That
	// can't be the case if one of target's args depends on a memory
	// state that is a successor of load's memory arg.
	//
	// For example, it would be invalid to merge load into target in
	// the following situation because newmem has killed oldmem
	// before target is reached:
	//     load = read ... oldmem
	//   newmem = write ... oldmem
	//     arg0 = read ... newmem
	//   target = add arg0 load
	//
	// Arguments from a different block can be excluded immediately:
	// they must dominate load (which is in the same block as target).
	var args []*Value
	for _, a := range target.Args {
		if a != load && a.Block.ID == target.Block.ID {
			args = append(args, a)
		}
	}

	// memPreds contains memory states known to be predecessors of load's
	// memory state. It is lazily initialized.
	var memPreds map[*Value]bool
	for i := 0; len(args) > 0; i++ {
		const limit = 100
		if i >= limit {
			// Give up if we have to inspect too many values —
			// mainly guards against cycles.
			return false
		}
		v := args[len(args)-1]
		args = args[:len(args)-1]
		if target.Block.ID != v.Block.ID {
			// Since target and load are in the same block
			// we can stop the walk when we leave the block.
			continue
		}
		if v.Op == OpPhi {
			// A Phi means we have reached the top of the block.
			continue
		}
		if v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
			// Tuple-with-memory producers could be handled,
			// but the situation is likely very rare — bail out.
			return false
		}
		if v.Op.SymEffect()&SymAddr != 0 {
			// This case prevents an operation that calculates the
			// address of a local variable from being forced to schedule
			// before its corresponding VarDef. For example:
			//   v1 = LOAD ...
			//   v2 = VARDEF
			//   v3 = LEAQ
			//   v4 = CMPQ v1 v3
			// Combining the CMPQ with the load would force the CMPQ
			// (and hence the LEAQ) to schedule before the VARDEF.
			return false
		}
		if v.Type.IsMemory() {
			if memPreds == nil {
				// Lazily build the set of memory states known to be
				// predecessors of load's memory state, by walking the
				// memory chain backward within the block.
				memPreds = make(map[*Value]bool)
				m := mem
				const limit = 50
				for i := 0; i < limit; i++ {
					if m.Op == OpPhi {
						// The memory phi, if it exists, is always
						// the first logical store in the block.
						break
					}
					if m.Block.ID != target.Block.ID {
						break
					}
					if !m.Type.IsMemory() {
						break
					}
					memPreds[m] = true
					if len(m.Args) == 0 {
						break
					}
					m = m.MemoryArg()
				}
			}

			// We can merge if v is a predecessor of mem.
			//
			// For example, we can merge load into target in the
			// following scenario:
			//      x = read ... v
			//    mem = write ... v
			//   load = read ... mem
			// target = add x load
			if memPreds[v] {
				continue
			}
			return false
		}
		if len(v.Args) > 0 && v.Args[len(v.Args)-1] == mem {
			// If v takes mem as an input then we know mem
			// is still valid at this point.
			continue
		}
		// Keep walking v's in-block arguments.
		for _, a := range v.Args {
			if target.Block.ID == a.Block.ID {
				args = append(args, a)
			}
		}
	}

	return true
}
459
460
// isSameCall reports whether aux describes a call to the named function.
func isSameCall(aux Aux, name string) bool {
	fn := aux.(*AuxCall).Fn
	return fn != nil && fn.String() == name
}

// isMalloc reports whether aux is a runtime allocation call — either the
// generic newobject entry point or one of the specialized malloc variants.
func isMalloc(aux Aux) bool {
	return isNewObject(aux) || isSpecializedMalloc(aux)
}

// isNewObject reports whether aux is a call to runtime.newobject.
func isNewObject(aux Aux) bool {
	fn := aux.(*AuxCall).Fn
	return fn != nil && fn.String() == "runtime.newobject"
}

// isSpecializedMalloc reports whether aux is a call to one of the
// size-class specialized runtime malloc functions.
func isSpecializedMalloc(aux Aux) bool {
	fn := aux.(*AuxCall).Fn
	if fn == nil {
		return false
	}
	name := fn.String()
	return strings.HasPrefix(name, "runtime.mallocgcSmallNoScanSC") ||
		strings.HasPrefix(name, "runtime.mallocgcSmallScanNoHeaderSC") ||
		strings.HasPrefix(name, "runtime.mallocgcTinySize")
}
485
486
// canLoadUnaligned reports whether the target architecture supports
// unaligned load operations (required data alignment is 1 byte).
func canLoadUnaligned(c *Config) bool {
	return c.ctxt.Arch.Alignment == 1
}
490
491
// nlzX returns the number of leading zeros in x's binary representation.
func nlz64(x int64) int { return bits.LeadingZeros64(uint64(x)) }
func nlz32(x int32) int { return bits.LeadingZeros32(uint32(x)) }
func nlz16(x int16) int { return bits.LeadingZeros16(uint16(x)) }
func nlz8(x int8) int   { return bits.LeadingZeros8(uint8(x)) }

// ntzX returns the number of trailing zeros in x's binary representation.
func ntz64(x int64) int { return bits.TrailingZeros64(uint64(x)) }
func ntz32(x int32) int { return bits.TrailingZeros32(uint32(x)) }
func ntz16(x int16) int { return bits.TrailingZeros16(uint16(x)) }
func ntz8(x int8) int   { return bits.TrailingZeros8(uint8(x)) }
502
503
// oneBit reports whether exactly one bit of x is set.
func oneBit[T int8 | int16 | int32 | int64](x T) bool {
	// A value with exactly one set bit clears to zero when ANDed
	// with itself minus one; exclude zero explicitly.
	return x != 0 && x&(x-1) == 0
}
507
508
// nto returns the number of trailing ones in x's binary representation
// (the trailing zeros of its complement).
func nto(x int64) int64 {
	return int64(ntz64(^x))
}
512
513
514
// logX returns the base-2 logarithm of n, interpreting n as unsigned.
// Meaningful only when n is a power of two (see logXu below).
func log8(n int8) int64   { return log8u(uint8(n)) }
func log16(n int16) int64 { return log16u(uint16(n)) }
func log32(n int32) int64 { return log32u(uint32(n)) }
func log64(n int64) int64 { return log64u(uint64(n)) }

// logXu returns bits.LenX(n)-1: the index of the highest set bit,
// i.e. floor(log2(n)) for n > 0, and -1 for n == 0.
func log8u(n uint8) int64   { return int64(bits.Len8(n)) - 1 }
func log16u(n uint16) int64 { return int64(bits.Len16(n)) - 1 }
func log32u(n uint32) int64 { return int64(bits.Len32(n)) - 1 }
func log64u(n uint64) int64 { return int64(bits.Len64(n)) - 1 }
526
527
// isPowerOfTwo reports whether n is a positive power of 2.
func isPowerOfTwo[T int8 | int16 | int32 | int64 | uint8 | uint16 | uint32 | uint64](n T) bool {
	if n <= 0 {
		// Zero and (for signed types) negative values are never powers of two.
		return false
	}
	return n&(n-1) == 0
}
531
532
// is32Bit reports whether n can be represented as a signed 32 bit integer.
func is32Bit(n int64) bool {
	return math.MinInt32 <= n && n <= math.MaxInt32
}

// is16Bit reports whether n can be represented as a signed 16 bit integer.
func is16Bit(n int64) bool {
	return math.MinInt16 <= n && n <= math.MaxInt16
}

// is8Bit reports whether n can be represented as a signed 8 bit integer.
func is8Bit(n int64) bool {
	return math.MinInt8 <= n && n <= math.MaxInt8
}
546
547
// isU8Bit reports whether n can be represented as an unsigned 8 bit integer.
func isU8Bit(n int64) bool {
	return n == int64(uint8(n))
}

// is12Bit reports whether n can be represented as a signed 12 bit integer.
func is12Bit(n int64) bool {
	return -(1<<11) <= n && n < (1<<11)
}

// isU12Bit reports whether n can be represented as an unsigned 12 bit integer.
func isU12Bit(n int64) bool {
	return 0 <= n && n < (1<<12)
}

// isU16Bit reports whether n can be represented as an unsigned 16 bit integer.
func isU16Bit(n int64) bool {
	return n == int64(uint16(n))
}

// isU32Bit reports whether n can be represented as an unsigned 32 bit integer.
func isU32Bit(n int64) bool {
	return n == int64(uint32(n))
}

// is20Bit reports whether n can be represented as a signed 20 bit integer.
func is20Bit(n int64) bool {
	return -(1<<19) <= n && n < (1<<19)
}
576
577
// b2i translates a boolean value to 0 or 1 for assigning to auxInt.
func b2i(b bool) int64 {
	if b {
		return 1
	}
	return 0
}

// b2i32 translates a boolean value to 0 or 1.
func b2i32(b bool) int32 {
	if b {
		return 1
	}
	return 0
}
592
// canMulStrengthReduce reports whether multiplication by x can be
// strength-reduced using a precomputed recipe in config.mulRecipes.
func canMulStrengthReduce(config *Config, x int64) bool {
	_, ok := config.mulRecipes[x]
	return ok
}

// canMulStrengthReduce32 is the 32-bit variant of canMulStrengthReduce.
func canMulStrengthReduce32(config *Config, x int32) bool {
	_, ok := config.mulRecipes[int64(x)]
	return ok
}

// mulStrengthReduce builds the strength-reduced equivalent of v*x at the
// location (block/position) of m. canMulStrengthReduce must have
// returned true for x beforehand; otherwise this indexes a missing recipe.
func mulStrengthReduce(m *Value, v *Value, x int64) *Value {
	return v.Block.Func.Config.mulRecipes[x].build(m, v)
}

// mulStrengthReduce32 is the 32-bit variant of mulStrengthReduce.
// canMulStrengthReduce32 must have returned true for x beforehand.
func mulStrengthReduce32(m *Value, v *Value, x int32) *Value {
	return v.Block.Func.Config.mulRecipes[int64(x)].build(m, v)
}
616
617
618
// shiftIsBounded reports whether shift value v is known to be in range for
// its type width. The information is carried in AuxInt (non-zero means
// bounded); NOTE(review): presumably set by whoever creates these shift
// values — not visible in this chunk.
func shiftIsBounded(v *Value) bool {
	return v.AuxInt != 0
}

// canonLessThan returns whether x is "smaller" than y, imposing a canonical
// total order on values: first by opcode, then by source position, then by ID.
func canonLessThan(x, y *Value) bool {
	if x.Op != y.Op {
		return x.Op < y.Op
	}
	if !x.Pos.SameFileAndLine(y.Pos) {
		return x.Pos.Before(y.Pos)
	}
	return x.ID < y.ID
}
634
635
636
// truncate64Fto32F converts a float64 to a float32 when that can be done
// without changing the value; otherwise it panics.
func truncate64Fto32F(f float64) float32 {
	if !isExactFloat32(f) {
		panic("truncate64Fto32F: truncation is not exact")
	}
	if !math.IsNaN(f) {
		return float32(f)
	}

	// NaN bit patterns aren't necessarily preserved across hardware
	// conversion instructions, so convert the bits manually.
	b := math.Float64bits(f)
	m := b & ((1 << 52) - 1) // mantissa (a.k.a. significand)
	//          | sign                  | exponent   | mantissa       |
	r := uint32(((b >> 32) & (1 << 31)) | 0x7f800000 | (m >> (52 - 23)))
	return math.Float32frombits(r)
}
652
653
// DivisionNeedsFixUp reports whether the division encoded in v needs
// fix-up code (AuxInt == 0 marks that case).
func DivisionNeedsFixUp(v *Value) bool {
	return v.AuxInt == 0
}

// auxTo32F decodes a float32 from an auxInt that stores float64 bits.
func auxTo32F(i int64) float32 {
	return truncate64Fto32F(math.Float64frombits(uint64(i)))
}
661 }
662
// auxIntToBool decodes a bool from an auxInt: any non-zero value is true.
// (Idiomatic single-expression form of the original if/else.)
func auxIntToBool(i int64) bool {
	return i != 0
}
// The auxIntToX helpers decode typed payloads out of a Value's int64 AuxInt.
func auxIntToInt8(i int64) int8 {
	return int8(i)
}
func auxIntToInt16(i int64) int16 {
	return int16(i)
}
func auxIntToInt32(i int64) int32 {
	return int32(i)
}
func auxIntToInt64(i int64) int64 {
	return i
}
func auxIntToUint8(i int64) uint8 {
	return uint8(i)
}
func auxIntToFloat32(i int64) float32 {
	return float32(math.Float64frombits(uint64(i)))
}
func auxIntToFloat64(i int64) float64 {
	return math.Float64frombits(uint64(i))
}
func auxIntToValAndOff(i int64) ValAndOff {
	return ValAndOff(i)
}
func auxIntToArm64BitField(i int64) arm64BitField {
	return arm64BitField(i)
}

// auxIntToArm64ConditionalParams unpacks the fields packed by
// arm64ConditionalParamsToAuxInt: cond in bits 0-15, nzcv in bits 16-19,
// constValue in bits 20-24, ind in bit 25.
func auxIntToArm64ConditionalParams(i int64) arm64ConditionalParams {
	var params arm64ConditionalParams
	params.cond = Op(i & 0xffff)
	i >>= 16
	params.nzcv = uint8(i & 0x0f)
	i >>= 4
	params.constValue = uint8(i & 0x1f)
	i >>= 5
	params.ind = i == 1
	return params
}
func auxIntToFlagConstant(x int64) flagConstant {
	return flagConstant(x)
}

func auxIntToOp(cc int64) Op {
	return Op(cc)
}
714
// The xToAuxInt helpers encode typed payloads into a Value's int64 AuxInt.
func boolToAuxInt(b bool) int64 {
	if b {
		return 1
	}
	return 0
}
func int8ToAuxInt(i int8) int64 {
	return int64(i)
}
func int16ToAuxInt(i int16) int64 {
	return int64(i)
}
func int32ToAuxInt(i int32) int64 {
	return int64(i)
}
func int64ToAuxInt(i int64) int64 {
	return i
}

// uint8ToAuxInt stores the 8 bits sign-extended (via int8), matching
// how auxIntToUint8 truncates them back.
func uint8ToAuxInt(i uint8) int64 {
	return int64(int8(i))
}
func float32ToAuxInt(f float32) int64 {
	return int64(math.Float64bits(float64(f)))
}
func float64ToAuxInt(f float64) int64 {
	return int64(math.Float64bits(f))
}
func valAndOffToAuxInt(v ValAndOff) int64 {
	return int64(v)
}
func arm64BitFieldToAuxInt(v arm64BitField) int64 {
	return int64(v)
}

// arm64ConditionalParamsToAuxInt packs cond (16 bits), nzcv (4 bits),
// constValue (5 bits) and ind (1 bit) into one auxInt; see
// auxIntToArm64ConditionalParams for the inverse.
func arm64ConditionalParamsToAuxInt(v arm64ConditionalParams) int64 {
	if v.cond&^0xffff != 0 {
		panic("condition value exceeds 16 bits")
	}

	var i int64
	if v.ind {
		i = 1 << 25
	}
	i |= int64(v.constValue) << 20
	i |= int64(v.nzcv) << 16
	i |= int64(v.cond)
	return i
}

func flagConstantToAuxInt(x flagConstant) int64 {
	return int64(x)
}

func opToAuxInt(o Op) int64 {
	return int64(o)
}
770
771
// Aux is an interface to hold miscellaneous data in Blocks and Values.
type Aux interface {
	CanBeAnSSAAux()
}

// auxMark is a sentinel Aux with no payload; its sole value is AuxMark.
type auxMark bool

func (auxMark) CanBeAnSSAAux() {}

var AuxMark auxMark

// stringAux wraps string values for use in Aux.
type stringAux string

func (stringAux) CanBeAnSSAAux() {}

// The auxToX / xToAux helpers convert between Aux and concrete aux payloads.
func auxToString(i Aux) string {
	return string(i.(stringAux))
}
func auxToSym(i Aux) Sym {
	// TODO: kind of a hack - allows nil interface through
	s, _ := i.(Sym)
	return s
}
func auxToType(i Aux) *types.Type {
	return i.(*types.Type)
}
func auxToCall(i Aux) *AuxCall {
	return i.(*AuxCall)
}
func auxToS390xCCMask(i Aux) s390x.CCMask {
	return i.(s390x.CCMask)
}
func auxToS390xRotateParams(i Aux) s390x.RotateParams {
	return i.(s390x.RotateParams)
}

func StringToAux(s string) Aux {
	return stringAux(s)
}
func symToAux(s Sym) Aux {
	return s
}
func callToAux(s *AuxCall) Aux {
	return s
}
func typeToAux(t *types.Type) Aux {
	return t
}
func s390xCCMaskToAux(c s390x.CCMask) Aux {
	return c
}
func s390xRotateParamsToAux(r s390x.RotateParams) Aux {
	return r
}
827
828
// uaddOvf reports whether unsigned a+b would overflow.
func uaddOvf(a, b int64) bool {
	// Overflow iff b exceeds the headroom left above a.
	return ^uint64(a) < uint64(b)
}
832
// devirtLECall rewrites an interface late-expanded call v into a static
// late-expanded call to sym, dropping the (now unneeded) first argument.
func devirtLECall(v *Value, sym *obj.LSym) *Value {
	v.Op = OpStaticLECall
	auxcall := v.Aux.(*AuxCall)
	auxcall.Fn = sym
	// Remove the first arg, keeping use counts consistent.
	v.Args[0].Uses--
	copy(v.Args[0:], v.Args[1:])
	v.Args[len(v.Args)-1] = nil // aid GC
	v.Args = v.Args[:len(v.Args)-1]
	if f := v.Block.Func; f.pass.debug > 0 {
		f.Warnl(v.Pos, "de-virtualizing call")
	}
	return v
}
847
848
// isSamePtr reports whether p1 and p2 point to the same address.
// A false result does not prove the pointers differ.
func isSamePtr(p1, p2 *Value) bool {
	if p1 == p2 {
		return true
	}
	if p1.Op != p2.Op {
		// Strip no-op OffPtr wrappers (offset 0) and retry.
		for p1.Op == OpOffPtr && p1.AuxInt == 0 {
			p1 = p1.Args[0]
		}
		for p2.Op == OpOffPtr && p2.AuxInt == 0 {
			p2 = p2.Args[0]
		}
		if p1 == p2 {
			return true
		}
		if p1.Op != p2.Op {
			return false
		}
	}
	switch p1.Op {
	case OpOffPtr:
		// Same offset off the same base.
		return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
	case OpAddr, OpLocalAddr:
		return p1.Aux == p2.Aux
	case OpAddPtr:
		// Same addend added to the same base.
		return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
	}
	return false
}
877
878 func isStackPtr(v *Value) bool {
879 for v.Op == OpOffPtr || v.Op == OpAddPtr {
880 v = v.Args[0]
881 }
882 return v.Op == OpSP || v.Op == OpLocalAddr
883 }
884
885
886
887
// disjoint reports whether the memory region [p1, p1+n1) does not overlap
// with [p2, p2+n2). A false result does not imply the regions overlap —
// it only means we could not prove disjointness.
func disjoint(p1 *Value, n1 int64, p2 *Value, n2 int64) bool {
	if n1 == 0 || n2 == 0 {
		// Empty regions overlap nothing.
		return true
	}
	if p1 == p2 {
		return false
	}
	// baseAndOffset strips OffPtr wrappers (accumulating the offset) and
	// looks through nil-check ops to find the underlying base pointer.
	baseAndOffset := func(ptr *Value) (base *Value, offset int64) {
		base, offset = ptr, 0
		for base.Op == OpOffPtr {
			offset += base.AuxInt
			base = base.Args[0]
		}
		if opcodeTable[base.Op].nilCheck {
			base = base.Args[0]
		}
		return base, offset
	}

	// Run types-based analysis: pointers to provably-disjoint types
	// cannot alias.
	if disjointTypes(p1.Type, p2.Type) {
		return true
	}

	p1, off1 := baseAndOffset(p1)
	p2, off2 := baseAndOffset(p2)
	if isSamePtr(p1, p2) {
		// Same base: reduce to interval-overlap on the offsets.
		return !overlap(off1, n1, off2, n2)
	}

	// p1 and p2 are not the same, so if they are both OpAddr, OpLocalAddr,
	// OpArg(IntReg), or OpSP they point to distinct storage classes/objects.
	switch p1.Op {
	case OpAddr, OpLocalAddr:
		if p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpSP {
			return true
		}
		// A global (Addr off SP-relative static base) never aliases an
		// argument slot.
		return (p2.Op == OpArg || p2.Op == OpArgIntReg) && p1.Args[0].Op == OpSP
	case OpArg, OpArgIntReg:
		if p2.Op == OpSP || p2.Op == OpLocalAddr {
			return true
		}
	case OpSP:
		return p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpArg || p2.Op == OpArgIntReg || p2.Op == OpSP
	}
	return false
}
936
937
938
939
// disjointTypes reports whether memory pointed to by a pointer of type t1
// cannot overlap memory pointed to by a pointer of type t2, based on
// type-aliasing reasoning. A false result means "unknown".
func disjointTypes(t1 *types.Type, t2 *types.Type) bool {
	// Unsafe pointers may alias anything.
	if t1.IsUnsafePtr() || t2.IsUnsafePtr() {
		return false
	}

	if !t1.IsPtr() || !t2.IsPtr() {
		// Not pointer types; nothing to conclude.
		return false
	}

	t1 = t1.Elem()
	t2 = t2.Elem()

	// Not-in-heap types may be conjured from arbitrary addresses, so
	// no aliasing conclusions are safe for them.
	if t1.NotInHeap() || t2.NotInHeap() {
		return false
	}

	isPtrShaped := func(t *types.Type) bool { return int(t.Size()) == types.PtrSize && t.HasPointers() }

	// A pointer-shaped slot and a pointer-free object cannot overlap:
	// the GC requires pointer slots to hold pointers.
	if (isPtrShaped(t1) && !t2.HasPointers()) ||
		(isPtrShaped(t2) && !t1.HasPointers()) {
		return true
	}

	return false
}
970
971
// moveSize returns the widest chunk size (in bytes) usable for moving
// data with the given address alignment on this configuration.
func moveSize(align int64, c *Config) int64 {
	switch {
	case align%8 == 0 && c.PtrSize == 8:
		return 8
	case align%4 == 0:
		return 4
	case align%2 == 0:
		return 2
	}
	return 1
}
983
984
985
986
// mergePoint finds a block among the blocks of values a that dominates b,
// by walking the single-predecessor chain backward from b. It returns nil
// if no such block can be found (a join point or the depth limit is hit).
func mergePoint(b *Block, a ...*Value) *Block {
	// Bound the backward walk so we don't spend unbounded time on
	// long chains.
	d := 100

	// Phase 1: find the first block along the chain containing one of a.
	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				goto found
			}
		}
		if len(b.Preds) > 1 {
			// A merge point in the CFG: can't continue a unique walk.
			return nil
		}
		b = b.Preds[0].b
		d--
	}
	return nil
found:
	// r is the candidate result: the first of a's blocks reached.
	r := b

	// Phase 2: keep walking; every value in a must be found on the chain
	// (so all of them dominate r).
	na := 0
	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				na++
			}
		}
		if na == len(a) {
			// All of a found on the backward walk: r works.
			return r
		}
		if len(b.Preds) > 1 {
			return nil
		}
		b = b.Preds[0].b
		d--

	}
	return nil
}
1033
1034
1035
1036
1037
1038
// clobber invalidates values. Always returns true, so it can be used as a
// condition in rewrite rules to:
//   - make sure the values are really dead and never used again, and
//   - decrement the use counts of the values' arguments.
func clobber(vv ...*Value) bool {
	for _, v := range vv {
		v.reset(OpInvalid)
		// Note: v.Block is left intact; it may be used after clobbering.
	}
	return true
}

// resetCopy resets v to be a copy of arg. Always returns true, for use
// as a rewrite-rule action.
func resetCopy(v *Value, arg *Value) bool {
	v.reset(OpCopy)
	v.AddArg(arg)
	return true
}

// clobberIfDead invalidates v when it is about to become dead (Uses == 1,
// the single remaining use being the one the rule is eliminating).
// Always returns true, so rules can use it to decrement arg use counts.
func clobberIfDead(v *Value) bool {
	if v.Uses == 1 {
		v.reset(OpInvalid)
	}
	// Note: v.Block is left intact.
	return true
}
1065
1066
1067
1068
1069
1070
1071
// noteRule is an easy way to track if a rule is matched when writing a new
// one. Make the rule of interest also conditional on
//
//	noteRule("note to self: rule of interest matched")
//
// and that message will print when the rule matches. Always returns true.
func noteRule(s string) bool {
	fmt.Println(s)
	return true
}

// countRule increments Func.ruleMatches[key] (lazily allocating the map).
// Intended to make it easier to find functions with many matches of a
// rule under development. Always returns true.
func countRule(v *Value, key string) bool {
	f := v.Block.Func
	if f.ruleMatches == nil {
		f.ruleMatches = make(map[string]int)
	}
	f.ruleMatches[key]++
	return true
}

// warnRule generates compiler debug output with string s when cond is true
// and v's position is real (line > 1 filters autogenerated positions).
// Always returns true.
func warnRule(cond bool, v *Value, s string) bool {
	if pos := v.Pos; pos.Line() > 1 && cond {
		v.Block.Func.Warnl(pos, s)
	}
	return true
}
1099
1100
// flagArg returns v's single flags-typed argument, or nil if v does not
// have exactly one argument of flags type.
func flagArg(v *Value) *Value {
	if len(v.Args) != 1 || !v.Args[0].Type.IsFlags() {
		return nil
	}
	return v.Args[0]
}
1107
1108
1109
1110
1111
1112
// arm64Negate finds the complement of an ARM64 condition code, e.g.
// !Equal -> NotEqual, !LessThan -> GreaterEqual.
//
// For floating point comparisons this is subtle because of NaN
// (unordered operands): the complement of LessThanF is NotLessThanF,
// not GreaterEqualF, so that NaN inputs are handled correctly.
func arm64Negate(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterEqual
	case OpARM64LessThanU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterThan:
		return OpARM64LessEqual
	case OpARM64GreaterThanU:
		return OpARM64LessEqualU
	case OpARM64LessEqual:
		return OpARM64GreaterThan
	case OpARM64LessEqualU:
		return OpARM64GreaterThanU
	case OpARM64GreaterEqual:
		return OpARM64LessThan
	case OpARM64GreaterEqualU:
		return OpARM64LessThanU
	case OpARM64Equal:
		return OpARM64NotEqual
	case OpARM64NotEqual:
		return OpARM64Equal
	case OpARM64LessThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64NotLessEqualF
	case OpARM64NotLessEqualF:
		return OpARM64LessEqualF
	case OpARM64GreaterThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64GreaterEqualF
	default:
		panic("unreachable")
	}
}
1155
1156
1157
1158
1159
1160
// arm64Invert evaluates (InvertFlags op): it returns the condition code
// that produces the same result as op would if the arguments of the
// flag-generating instruction were swapped, e.g.
// (InvertFlags (CMP x y)) -> (CMP y x).
func arm64Invert(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterThan
	case OpARM64LessThanU:
		return OpARM64GreaterThanU
	case OpARM64GreaterThan:
		return OpARM64LessThan
	case OpARM64GreaterThanU:
		return OpARM64LessThanU
	case OpARM64LessEqual:
		return OpARM64GreaterEqual
	case OpARM64LessEqualU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterEqual:
		return OpARM64LessEqual
	case OpARM64GreaterEqualU:
		return OpARM64LessEqualU
	case OpARM64Equal, OpARM64NotEqual:
		// Symmetric conditions are unchanged by argument swap.
		return op
	case OpARM64LessThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64GreaterEqualF
	case OpARM64GreaterEqualF:
		return OpARM64LessEqualF
	case OpARM64NotLessThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64NotLessEqualF
	default:
		panic("unreachable")
	}
}
1201
1202
1203
1204
// ccARM64Eval evaluates an ARM64 comparison op against a flags value that
// is potentially constant. It returns 1 if the condition is known true,
// -1 if known false, and 0 if it cannot be determined.
func ccARM64Eval(op Op, flags *Value) int {
	fop := flags.Op
	if fop == OpARM64InvertFlags {
		// Inverted flags flip the verdict.
		return -ccARM64Eval(op, flags.Args[0])
	}
	if fop != OpARM64FlagConstant {
		return 0
	}
	fc := flagConstant(flags.AuxInt)
	// Local b2i maps a known boolean to the +1/-1 verdict encoding.
	b2i := func(b bool) int {
		if b {
			return 1
		}
		return -1
	}
	switch op {
	case OpARM64Equal:
		return b2i(fc.eq())
	case OpARM64NotEqual:
		return b2i(fc.ne())
	case OpARM64LessThan:
		return b2i(fc.lt())
	case OpARM64LessThanU:
		return b2i(fc.ult())
	case OpARM64GreaterThan:
		return b2i(fc.gt())
	case OpARM64GreaterThanU:
		return b2i(fc.ugt())
	case OpARM64LessEqual:
		return b2i(fc.le())
	case OpARM64LessEqualU:
		return b2i(fc.ule())
	case OpARM64GreaterEqual:
		return b2i(fc.ge())
	case OpARM64GreaterEqualU:
		return b2i(fc.uge())
	}
	return 0
}
1244
1245
1246
// logRule logs the use of the rule s to a log file under $GOROOT/src.
// This is only reached if rewrite rules were generated with logging
// enabled; see the rule generator.
func logRule(s string) {
	if ruleFile == nil {
		// Open the log file lazily, in append mode: the compiler runs
		// many times during a full build and we want the concatenation
		// of all of those logs. Users must remove the old log to get
		// fresh data.
		w, err := os.OpenFile(filepath.Join(os.Getenv("GOROOT"), "src", "rulelog"),
			os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
		if err != nil {
			panic(err)
		}
		ruleFile = w
	}
	// Fix: surface write failures instead of silently dropping log
	// entries — consistent with the panic on open failure above.
	if _, err := fmt.Fprintln(ruleFile, s); err != nil {
		panic(err)
	}
}

var ruleFile io.Writer
1267
// isConstZero reports whether v is a constant zero: nil, a numeric/bool
// constant equal to 0, a composite-make op all of whose parts are
// constant zero, or a projection from such a composite.
func isConstZero(v *Value) bool {
	switch v.Op {
	case OpConstNil:
		return true
	case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConst32F, OpConst64F:
		return v.AuxInt == 0
	case OpStringMake, OpIMake, OpComplexMake:
		return isConstZero(v.Args[0]) && isConstZero(v.Args[1])
	case OpSliceMake:
		return isConstZero(v.Args[0]) && isConstZero(v.Args[1]) && isConstZero(v.Args[2])
	case OpStringPtr, OpStringLen, OpSlicePtr, OpSliceLen, OpSliceCap, OpITab, OpIData, OpComplexReal, OpComplexImag:
		// A component of a constant-zero composite is itself zero.
		return isConstZero(v.Args[0])
	}
	return false
}
1283
1284
// reciprocalExact64 reports whether 1/c is representable exactly in
// float64: c must be a power of two (zero mantissa) whose exponent is
// neither denormal/zero (0) nor would produce a non-finite or denormal
// reciprocal (0x7ff, 0x7fe).
func reciprocalExact64(c float64) bool {
	bitsOf := math.Float64bits(c)
	if mantissa := bitsOf & (1<<52 - 1); mantissa != 0 {
		// Not a power of two: reciprocal is inexact.
		return false
	}
	exp := (bitsOf >> 52) & (1<<11 - 1)
	return exp != 0 && exp != 0x7ff && exp != 0x7fe
}
1305
1306
// reciprocalExact32 reports whether 1/c is representable exactly in
// float32: c must be a power of two (zero mantissa) whose exponent is
// neither denormal/zero (0) nor would produce a non-finite or denormal
// reciprocal (0xff, 0xfe).
func reciprocalExact32(c float32) bool {
	bitsOf := math.Float32bits(c)
	if mantissa := bitsOf & (1<<23 - 1); mantissa != 0 {
		// Not a power of two: reciprocal is inexact.
		return false
	}
	exp := (bitsOf >> 23) & (1<<8 - 1)
	return exp != 0 && exp != 0xff && exp != 0xfe
}
1327
1328
// isARMImmRot reports whether v can be encoded as an ARM-mode immediate:
// an 8-bit constant rotated right by an even amount (equivalently, some
// even left-rotation of v fits in the low 8 bits).
func isARMImmRot(v uint32) bool {
	// Try all 16 even rotations of the 32-bit word.
	for i := 16; i > 0; i-- {
		if v <= 0xff {
			return true
		}
		v = v<<2 | v>>30 // rotate left by 2
	}
	return false
}
1339
1340
1341
// overlap reports whether the memory ranges [offset1, offset1+size1)
// and [offset2, offset2+size2) overlap.
func overlap(offset1, size1, offset2, size2 int64) bool {
	switch {
	case offset1 >= offset2 && offset2+size2 > offset1:
		// Range 2 extends past the start of range 1.
		return true
	case offset2 >= offset1 && offset1+size1 > offset2:
		// Range 1 extends past the start of range 2.
		return true
	default:
		return false
	}
}
1351
1352
1353
1354
// ZeroUpper32Bits reports whether the upper 32 bits of x are known to be
// zero, recursing through Phi/Select up to the given depth.
func ZeroUpper32Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		// If the value is signed, it might get re-sign-extended
		// during spill and restore.
		return false
	}
	switch x.Op {
	case OpAMD64MOVLconst, OpAMD64MOVLload, OpAMD64MOVLQZX, OpAMD64MOVLloadidx1,
		OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVBload, OpAMD64MOVBloadidx1,
		OpAMD64MOVLloadidx4, OpAMD64ADDLload, OpAMD64SUBLload, OpAMD64ANDLload,
		OpAMD64ORLload, OpAMD64XORLload, OpAMD64CVTTSD2SL,
		OpAMD64ADDL, OpAMD64ADDLconst, OpAMD64SUBL, OpAMD64SUBLconst,
		OpAMD64ANDL, OpAMD64ANDLconst, OpAMD64ORL, OpAMD64ORLconst,
		OpAMD64XORL, OpAMD64XORLconst, OpAMD64NEGL, OpAMD64NOTL,
		OpAMD64SHRL, OpAMD64SHRLconst, OpAMD64SARL, OpAMD64SARLconst,
		OpAMD64SHLL, OpAMD64SHLLconst:
		// 32-bit amd64 ops zero-extend their results.
		return true
	case OpAMD64MOVQconst:
		return uint64(uint32(x.AuxInt)) == uint64(x.AuxInt)
	case OpARM64REV16W, OpARM64REVW, OpARM64RBITW, OpARM64CLZW, OpARM64EXTRWconst,
		OpARM64MULW, OpARM64MNEGW, OpARM64UDIVW, OpARM64DIVW, OpARM64UMODW,
		OpARM64MADDW, OpARM64MSUBW, OpARM64RORW, OpARM64RORWconst:
		return true
	case OpArg:
		// On amd64, 32-bit args are loaded from the stack zero-extended.
		// NOTE(review): other arches load args sign/zero extended based
		// on type, hence the arch restriction.
		return x.Type.Size() == 4 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		// Phis can lead to infinite loops, so use a bounded recursion
		// depth instead.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !ZeroUpper32Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1398
1399
// ZeroUpper48Bits reports whether the upper 48 bits of x are known to be
// zero, recursing through Phi/Select up to the given depth.
func ZeroUpper48Bits(x *Value, depth int) bool {
	// Signed sub-64-bit values may get re-sign-extended on spill/restore.
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		return false
	}
	switch x.Op {
	case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2:
		return true
	case OpAMD64MOVQconst, OpAMD64MOVLconst:
		return uint64(uint16(x.AuxInt)) == uint64(x.AuxInt)
	case OpArg:
		// See the amd64 argument-loading note in ZeroUpper32Bits.
		return x.Type.Size() == 2 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		// Bounded recursion to avoid phi cycles.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !ZeroUpper48Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1427
1428
// ZeroUpper56Bits reports whether the upper 56 bits of x are known to be
// zero, recursing through Phi/Select up to the given depth.
func ZeroUpper56Bits(x *Value, depth int) bool {
	// Signed sub-64-bit values may get re-sign-extended on spill/restore.
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		return false
	}
	switch x.Op {
	case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1:
		return true
	case OpAMD64MOVQconst, OpAMD64MOVLconst:
		return uint64(uint8(x.AuxInt)) == uint64(x.AuxInt)
	case OpArg:
		// See the amd64 argument-loading note in ZeroUpper32Bits.
		return x.Type.Size() == 1 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		// Bounded recursion to avoid phi cycles.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !ZeroUpper56Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1456
// isInlinableMemclr reports whether the target architecture can inline a
// memory clear of sz bytes (rather than calling the runtime).
func isInlinableMemclr(c *Config, sz int64) bool {
	if sz < 0 {
		return false
	}
	// TODO: expand this check to allow other architectures.
	switch c.arch {
	case "amd64", "arm64":
		return true
	case "ppc64le", "ppc64", "loong64":
		return sz < 512
	}
	return false
}
1471
1472
1473
1474
1475
1476
// isInlinableMemmove reports whether the given arch performs a Move of the
// given size faster than memmove. It only returns true when replacing the
// memmove with a Move is safe: either Move does all of its loads before any
// of its stores, or the regions are known to be disjoint.
// This is used as a check for replacing memmove with Move ops.
func isInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	// Per-arch size limits; larger moves are allowed only when the
	// regions are provably disjoint (where the arch's Move lowering
	// can then interleave loads and stores safely).
	switch c.arch {
	case "amd64":
		return sz <= 16 || (sz < 1024 && disjoint(dst, sz, src, sz))
	case "arm64":
		return sz <= 64 || (sz <= 1024 && disjoint(dst, sz, src, sz))
	case "loong64":
		return sz <= 16 || (sz <= 64 && disjoint(dst, sz, src, sz))
	case "386":
		return sz <= 8
	case "s390x", "ppc64", "ppc64le":
		return sz <= 8 || disjoint(dst, sz, src, sz)
	case "arm", "mips", "mips64", "mipsle", "mips64le":
		return sz <= 4
	}
	return false
}

// IsInlinableMemmove is the exported wrapper around isInlinableMemmove.
func IsInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	return isInlinableMemmove(dst, src, sz, c)
}
1501
1502
1503
1504
// logLargeCopy logs the occurrence of a large (>= 128 byte) copy to the
// optimization log when it is enabled. Always returns true so it can be
// used as a rewrite-rule condition.
func logLargeCopy(v *Value, s int64) bool {
	if s < 128 {
		return true
	}
	if logopt.Enabled() {
		logopt.LogOpt(v.Pos, "copy", "lower", v.Block.Func.Name, fmt.Sprintf("%d bytes", s))
	}
	return true
}

// LogLargeCopy is like logLargeCopy, for callers that have a position and
// function name rather than a *Value.
func LogLargeCopy(funcName string, pos src.XPos, s int64) {
	if s < 128 {
		return
	}
	if logopt.Enabled() {
		logopt.LogOpt(pos, "copy", "lower", funcName, fmt.Sprintf("%d bytes", s))
	}
}
1522
1523
1524
// hasSmallRotate reports whether the architecture has rotate instructions
// for sub-32-bit widths.
func hasSmallRotate(c *Config) bool {
	switch c.arch {
	case "amd64", "386":
		return true
	default:
		return false
	}
}
1533
// supportsPPC64PCRel reports whether PC-relative addressing may be used:
// currently only for GOPPC64 >= power10 on linux.
func supportsPPC64PCRel() bool {
	return buildcfg.GOPPC64 >= 10 && buildcfg.GOOS == "linux"
}
1539
// newPPC64ShiftAuxInt packs a PPC64 shift (sh), mask-begin (mb) and
// mask-end (me) into one auxint as sh<<16 | mb<<8 | me. Each field must
// lie in [0, sz); out-of-range arguments panic.
func newPPC64ShiftAuxInt(sh, mb, me, sz int64) int32 {
	mustFit := func(v int64, field string) {
		if v < 0 || v >= sz {
			panic("PPC64 shift arg " + field + " out of range")
		}
	}
	mustFit(sh, "sh")
	mustFit(mb, "mb")
	mustFit(me, "me")
	return int32(sh<<16 | mb<<8 | me)
}
1552
// GetPPC64Shiftsh extracts the sh field (bits 16-23, sign-extended via
// int8) from a PPC64 shift auxint packed by newPPC64ShiftAuxInt.
func GetPPC64Shiftsh(auxint int64) int64 {
	return int64(int8(auxint >> 16))
}

// GetPPC64Shiftmb extracts the mb field (bits 8-15, sign-extended via
// int8) from a PPC64 shift auxint.
func GetPPC64Shiftmb(auxint int64) int64 {
	return int64(int8(auxint >> 8))
}
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570 func isPPC64WordRotateMask(v64 int64) bool {
1571
1572 v := uint32(v64)
1573 vp := (v & -v) + v
1574
1575 vn := ^v
1576 vpn := (vn & -vn) + vn
1577 return (v&vp == 0 || vn&vpn == 0) && v != 0
1578 }
1579
1580
1581
1582
// isPPC64WordRotateMaskNonWrapping reports whether v64 is a mask usable by
// a PPC64 word rotate instruction where the run of ones must not wrap
// around bit 31/0, and the value must fit in 32 bits.
func isPPC64WordRotateMaskNonWrapping(v64 int64) bool {
	v := uint32(v64)
	if int64(v) != v64 {
		// Upper 32 bits set: not representable as a word mask.
		return false
	}
	// Nonzero and a single contiguous (non-wrapping) run of ones.
	return v != 0 && v&(v+(v&-v)) == 0
}
1589
1590
1591
1592
// encodePPC64RotateMask packs a rotate amount, a mask, and the operand
// width (32 or 64) into a single auxint of the form
// me | mb<<8 | rotate<<16 | nbits<<24, where mb/me are the mask's begin and
// end bit positions. Panics on an all-zero or all-one mask, or a rotate
// that is not smaller than nbits.
func encodePPC64RotateMask(rotate, mask, nbits int64) int64 {
	var mb, me, mbn, men int

	// Determine boundaries of the mask, and of its complement (used below
	// to handle masks whose run of ones wraps around).
	if mask == 0 || ^mask == 0 || rotate >= nbits {
		panic(fmt.Sprintf("invalid PPC64 rotate mask: %x %d %d", uint64(mask), rotate, nbits))
	} else if nbits == 32 {
		mb = bits.LeadingZeros32(uint32(mask))
		me = 32 - bits.TrailingZeros32(uint32(mask))
		mbn = bits.LeadingZeros32(^uint32(mask))
		men = 32 - bits.TrailingZeros32(^uint32(mask))
	} else {
		mb = bits.LeadingZeros64(uint64(mask))
		me = 64 - bits.TrailingZeros64(uint64(mask))
		mbn = bits.LeadingZeros64(^uint64(mask))
		men = 64 - bits.TrailingZeros64(^uint64(mask))
	}
	// mb==0 && me==nbits means the set bits span the whole value, i.e. the
	// run of ones wraps; use the complement's boundaries, swapped.
	if mb == 0 && me == int(nbits) {

		mb, me = men, mbn
	}

	return int64(me) | int64(mb<<8) | rotate<<16 | nbits<<24
}
1618
1619
1620
1621
1622
1623
// mergePPC64RLDICLandSRDconst merges an RLDICL-style encoded auxint with a
// logical right shift by s: the shift contributes a rotate of 64-s and
// forces the mask-begin (mb) to be at least s. Panics if the encoded value
// already carries a non-zero rotate, which the caller is expected to have
// ruled out.
func mergePPC64RLDICLandSRDconst(encoded, s int64) int64 {
	mb := s
	r := 64 - s
	// Widen mb if the shift clears more high bits than the existing mask.
	if (encoded>>8)&0xFF < mb {
		encoded = (encoded &^ 0xFF00) | mb<<8
	}
	// The rotate field (bits 16..23) must be zero before we install r.
	if (encoded & 0xFF0000) != 0 {
		panic("non-zero rotate")
	}
	return encoded | r<<16
}
1637
1638
1639
// DecodePPC64RotateMask unpacks an auxint produced by encodePPC64RotateMask
// into its rotate amount, mask-begin (mb), mask-end (me), and the expanded
// mask itself. Note me is returned in the inclusive form used by the ISA.
func DecodePPC64RotateMask(sauxint int64) (rotate, mb, me int64, mask uint64) {
	auxint := uint64(sauxint)
	rotate = int64((auxint >> 16) & 0xFF)
	mb = int64((auxint >> 8) & 0xFF)
	me = int64((auxint >> 0) & 0xFF)
	nbits := int64((auxint >> 24) & 0xFF)
	// Rebuild the mask from its boundaries; mb > me means the run of ones
	// wraps, which is the complement of the non-wrapping mask.
	mask = ((1 << uint(nbits-mb)) - 1) ^ ((1 << uint(nbits-me)) - 1)
	if mb > me {
		mask = ^mask
	}
	if nbits == 32 {
		mask = uint64(uint32(mask))
	}

	// Convert me to the inclusive bit index expected by the ISA's
	// MASK(mb,me) definition.
	me = (me - 1) & (nbits - 1)
	return
}
1659
1660
1661
1662
// isPPC64ValidShiftMask reports whether v is a nonzero mask of contiguous
// ones starting at bit 0 (i.e. v+1 is a power of two).
func isPPC64ValidShiftMask(v int64) bool {
	return v != 0 && (v+1)&v == 0
}
1669
// getPPC64ShiftMaskLength returns the bit length of mask v, i.e. the number
// of ones when v is a low-order contiguous mask (see isPPC64ValidShiftMask).
func getPPC64ShiftMaskLength(v int64) int64 {
	return int64(bits.Len64(uint64(v)))
}
1673
1674
1675
// mergePPC64RShiftMask returns mask m intersected with the set of bits that
// can survive an unsigned right shift by s of an nbits-wide value.
func mergePPC64RShiftMask(m, s, nbits int64) int64 {
	survivors := ^uint64(0) >> uint(64-nbits) >> uint(s)
	return m & int64(survivors)
}
1680
1681
// mergePPC64AndSrwi encodes (AND m (SRWconst x [s])) as a single
// rotate-and-mask auxint, or returns 0 if the combined mask is not a valid
// word rotate mask.
func mergePPC64AndSrwi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 32)
	if !isPPC64WordRotateMask(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}

// mergePPC64AndSrdi encodes (AND m (SRDconst x [s])) as a 32-bit
// rotate-and-mask auxint, or returns 0 if the result cannot be expressed
// that way (mask wraps, doesn't fit in 32 bits, or the rotate would pull
// in bits from the upper word).
func mergePPC64AndSrdi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 64)

	// Reject if the rotated upper 32 bits would intersect the mask.
	rv := bits.RotateLeft64(0xFFFFFFFF00000000, -int(s))
	if rv&uint64(mask) != 0 {
		return 0
	}
	if !isPPC64WordRotateMaskNonWrapping(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}

// mergePPC64AndSldi encodes (AND m (SLDIconst x [s])) as a 32-bit
// rotate-and-mask auxint, or returns 0 under the same restrictions as
// mergePPC64AndSrdi.
func mergePPC64AndSldi(m, s int64) int64 {
	mask := -1 << s & m

	// Reject if the rotated upper 32 bits would intersect the mask.
	rv := bits.RotateLeft64(0xFFFFFFFF00000000, int(s))
	if rv&uint64(mask) != 0 {
		return 0
	}
	if !isPPC64WordRotateMaskNonWrapping(mask) {
		return 0
	}
	return encodePPC64RotateMask(s&31, mask, 32)
}
1719
1720
1721
// mergePPC64ClrlsldiSrw tries to fold a CLRLSLDI (encoded in sld) applied
// to the result of SRWconst [srw] into one rotate-and-mask auxint;
// returns 0 if the composite mask doesn't fit in 32 bits or is empty.
func mergePPC64ClrlsldiSrw(sld, srw int64) int64 {
	mask_1 := uint64(0xFFFFFFFF >> uint(srw))
	// mask_2: bits kept after clearing the leading GetPPC64Shiftmb(sld) bits.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(sld))

	// Combine the two masks and apply the left shift of the CLRLSLDI.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	// The composite rotate is the SRW's rotate plus the CLRLSLDI's shift.
	r_1 := 32 - srw
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 31

	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}

// mergePPC64ClrlsldiSrd is the 64-bit-shift variant of
// mergePPC64ClrlsldiSrw: it folds CLRLSLDI of SRDconst [srd]; returns 0 if
// the result cannot be expressed as a 32-bit rotate-and-mask.
func mergePPC64ClrlsldiSrd(sld, srd int64) int64 {
	mask_1 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(srd)
	// mask_2: bits kept after clearing the leading GetPPC64Shiftmb(sld) bits.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(sld))

	// Combine the two masks and apply the left shift of the CLRLSLDI.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	r_1 := 64 - srd
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 63

	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}
	// Reject if the rotated upper 32 bits would intersect the mask.
	v1 := bits.RotateLeft64(0xFFFFFFFF00000000, int(r_3))
	if v1&mask_3 != 0 {
		return 0
	}
	return encodePPC64RotateMask(r_3&31, int64(mask_3), 32)
}
1764
1765
1766
// mergePPC64ClrlsldiRlwinm folds a CLRLSLDI (sld) applied to the result of
// an RLWINM (rlw, a packed rotate-mask auxint) into a single RLWINM auxint,
// or returns 0 if the composite mask is not a valid 32-bit rotate mask.
func mergePPC64ClrlsldiRlwinm(sld int32, rlw int64) int64 {
	r_1, _, _, mask_1 := DecodePPC64RotateMask(rlw)
	// mask_2: bits kept after clearing the leading GetPPC64Shiftmb(sld) bits.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))

	// Combine masks, then apply the CLRLSLDI's left shift and add its
	// shift amount into the rotate.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(int64(sld)))
	r_2 := GetPPC64Shiftsh(int64(sld))
	r_3 := (r_1 + r_2) & 31

	// The result must be a valid word rotate mask that fits in 32 bits.
	if !isPPC64WordRotateMask(int64(mask_3)) || uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}

// mergePPC64AndRlwinm folds (ANDconst [mask] (RLWINM [rlw] x)) into a
// single RLWINM auxint, or returns 0 if the intersected mask is not a
// valid word rotate mask.
func mergePPC64AndRlwinm(mask uint32, rlw int64) int64 {
	r, _, _, mask_rlw := DecodePPC64RotateMask(rlw)
	mask_out := (mask_rlw & uint64(mask))

	if !isPPC64WordRotateMask(int64(mask_out)) {
		return 0
	}
	return encodePPC64RotateMask(r, int64(mask_out), 32)
}

// mergePPC64MovwzregRlwinm returns rlw if the RLWINM's mask does not wrap
// (mb <= me), meaning a following MOVWZreg zero-extension is redundant;
// returns 0 otherwise.
func mergePPC64MovwzregRlwinm(rlw int64) int64 {
	_, mb, me, _ := DecodePPC64RotateMask(rlw)
	if mb > me {
		return 0
	}
	return rlw
}

// mergePPC64RlwinmAnd folds (AND mask (RLWINM [rlw] x)) into a single
// RLWINM auxint. Unlike mergePPC64AndRlwinm the AND is applied before the
// rotate, so the mask must first be rotated to match.
func mergePPC64RlwinmAnd(rlw int64, mask uint32) int64 {
	r, _, _, mask_rlw := DecodePPC64RotateMask(rlw)

	// Rotate the AND mask into the post-rotate bit positions.
	r_mask := bits.RotateLeft32(mask, int(r))

	mask_out := (mask_rlw & uint64(r_mask))

	if !isPPC64WordRotateMask(int64(mask_out)) {
		return 0
	}
	return encodePPC64RotateMask(r, int64(mask_out), 32)
}
1823
1824
1825
// mergePPC64SldiRlwinm folds (SLDIconst [sldi] (RLWINM [rlw] x)) into a
// single RLWINM auxint, or returns 0 if the RLWINM's mask wraps, starts
// below the shift amount, or the shifted mask no longer fits in 32 bits.
func mergePPC64SldiRlwinm(sldi, rlw int64) int64 {
	r_1, mb, me, mask_1 := DecodePPC64RotateMask(rlw)
	if mb > me || mb < sldi {
		// Wrapping masks, or masks whose leading bits would be shifted
		// out, cannot be merged.
		return 0
	}

	mask_3 := mask_1 << sldi
	r_3 := (r_1 + sldi) & 31

	// The shifted mask must still fit in 32 bits.
	if uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}

// mergePPC64SldiSrw folds (SLDIconst [sld] (SRWconst [srw] x)) into a
// single rotate-and-mask auxint; returns 0 when sld > srw (bits would be
// invented) or srw is out of range for a word shift.
func mergePPC64SldiSrw(sld, srw int64) int64 {
	if sld > srw || srw >= 32 {
		return 0
	}
	mask_r := uint32(0xFFFFFFFF) >> uint(srw)
	mask_l := uint32(0xFFFFFFFF) >> uint(sld)
	mask := (mask_r & mask_l) << uint(sld)
	return encodePPC64RotateMask((32-srw+sld)&31, int64(mask), 32)
}
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874
1875
1876
1877
1878
1879
1880
1881 func convertPPC64OpToOpCC(op *Value) *Value {
1882 ccOpMap := map[Op]Op{
1883 OpPPC64ADD: OpPPC64ADDCC,
1884 OpPPC64ADDconst: OpPPC64ADDCCconst,
1885 OpPPC64AND: OpPPC64ANDCC,
1886 OpPPC64ANDN: OpPPC64ANDNCC,
1887 OpPPC64ANDconst: OpPPC64ANDCCconst,
1888 OpPPC64CNTLZD: OpPPC64CNTLZDCC,
1889 OpPPC64MULHDU: OpPPC64MULHDUCC,
1890 OpPPC64NEG: OpPPC64NEGCC,
1891 OpPPC64NOR: OpPPC64NORCC,
1892 OpPPC64OR: OpPPC64ORCC,
1893 OpPPC64RLDICL: OpPPC64RLDICLCC,
1894 OpPPC64SUB: OpPPC64SUBCC,
1895 OpPPC64XOR: OpPPC64XORCC,
1896 }
1897 b := op.Block
1898 opCC := b.NewValue0I(op.Pos, ccOpMap[op.Op], types.NewTuple(op.Type, types.TypeFlags), op.AuxInt)
1899 opCC.AddArgs(op.Args...)
1900 op.reset(OpSelect0)
1901 op.AddArgs(opCC)
1902 return op
1903 }
1904
1905
// convertPPC64RldiclAndccconst returns the mask of a packed RLDICL auxint
// when it has no rotate and the mask fits in 16 bits (i.e. it is
// expressible as an ANDCCconst immediate); returns 0 otherwise.
func convertPPC64RldiclAndccconst(sauxint int64) int64 {
	r, _, _, mask := DecodePPC64RotateMask(sauxint)
	if r != 0 || mask&0xFFFF != mask {
		return 0
	}
	return int64(mask)
}
1913
1914
// rotateLeft32 rotates the low 32 bits of v left by rotate and returns the
// (zero-extended) result.
func rotateLeft32(v, rotate int64) int64 {
	return int64(bits.RotateLeft32(uint32(v), int(rotate)))
}

// rotateRight64 rotates v right by rotate (implemented as a left rotate by
// the negated amount).
func rotateRight64(v, rotate int64) int64 {
	return int64(bits.RotateLeft64(uint64(v), int(-rotate)))
}
1922
1923
1924 func armBFAuxInt(lsb, width int64) arm64BitField {
1925 if lsb < 0 || lsb > 63 {
1926 panic("ARM(64) bit field lsb constant out of range")
1927 }
1928 if width < 1 || lsb+width > 64 {
1929 panic("ARM(64) bit field width constant out of range")
1930 }
1931 return arm64BitField(width | lsb<<8)
1932 }
1933
1934
// lsb returns the bit-field's least-significant-bit position (bits 8+ of
// the packed value; see armBFAuxInt).
func (bfc arm64BitField) lsb() int64 {
	return int64(uint64(bfc) >> 8)
}

// width returns the bit-field's width in bits (low 8 bits of the packed
// value; see armBFAuxInt).
func (bfc arm64BitField) width() int64 {
	return int64(bfc) & 0xff
}
1943
1944
// isARM64BFMask reports whether mask>>rshift is a nonzero run of low-order
// ones (a power of two minus one) that, placed at lsb, still fits within
// 64 bits.
func isARM64BFMask(lsb, mask, rshift int64) bool {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	return shiftedMask != 0 && isPowerOfTwo(shiftedMask+1) && nto(shiftedMask)+lsb < 64
}

// arm64BFWidth returns the width (number of trailing ones) of mask>>rshift,
// panicking if the shifted mask is zero.
func arm64BFWidth(mask, rshift int64) int64 {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	if shiftedMask == 0 {
		panic("ARM64 BF mask is zero")
	}
	return nto(shiftedMask)
}
1958
1959
// arm64ConditionalParamsAuxInt builds the aux value for an ARM64
// conditional-compare, validating that cond is one of the ARM64 condition
// ops and that nzcv uses only its low four flag bits.
func arm64ConditionalParamsAuxInt(cond Op, nzcv uint8) arm64ConditionalParams {
	if cond < OpARM64Equal || cond > OpARM64GreaterEqualU {
		panic("Wrong conditional operation")
	}
	if nzcv&0x0f != nzcv {
		panic("Wrong value of NZCV flag")
	}
	return arm64ConditionalParams{cond, nzcv, 0, false}
}

// arm64ConditionalParamsAuxIntWithValue is like
// arm64ConditionalParamsAuxInt but additionally records a 5-bit immediate
// operand (value), setting the ind flag to mark its presence.
func arm64ConditionalParamsAuxIntWithValue(cond Op, nzcv uint8, value uint8) arm64ConditionalParams {
	if value&0x1f != value {
		panic("Wrong value of constant")
	}
	params := arm64ConditionalParamsAuxInt(cond, nzcv)
	params.constValue = value
	params.ind = true
	return params
}
1980
1981
// Cond returns the condition op stored in the params.
func (condParams arm64ConditionalParams) Cond() Op {
	return condParams.cond
}

// Nzcv returns the stored NZCV flag nibble widened to int64.
func (condParams arm64ConditionalParams) Nzcv() int64 {
	return int64(condParams.nzcv)
}

// ConstValue returns the stored immediate and whether one was set
// (see arm64ConditionalParamsAuxIntWithValue).
func (condParams arm64ConditionalParams) ConstValue() (int64, bool) {
	return int64(condParams.constValue), condParams.ind
}
1995
1996
1997
1998
1999 func registerizable(b *Block, typ *types.Type) bool {
2000 if typ.IsPtrShaped() || typ.IsFloat() || typ.IsBoolean() {
2001 return true
2002 }
2003 if typ.IsInteger() {
2004 return typ.Size() <= b.Func.Config.RegSize
2005 }
2006 return false
2007 }
2008
2009
// needRaceCleanup reports whether a racefuncenter/racefuncexit call can be
// removed from the function. This is only allowed when race instrumentation
// is on and the function makes no other calls (except a small allowlist of
// runtime panics, which cannot themselves race). When removing a
// racefuncenter, it also unlinks the store of the caller PC that feeds it.
func needRaceCleanup(sym *AuxCall, v *Value) bool {
	f := v.Block.Func
	if !f.Config.Race {
		return false
	}
	if !isSameCall(sym, "runtime.racefuncenter") && !isSameCall(sym, "runtime.racefuncexit") {
		return false
	}
	// Scan the whole function: any real call means the enter/exit pair
	// must stay so the race detector sees a consistent call stack.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpStaticCall, OpStaticLECall:
				// These calls are allowed to remain: the race pair itself,
				// and panic helpers that never run user code.
				s := v.Aux.(*AuxCall).Fn.String()
				switch s {
				case "runtime.racefuncenter", "runtime.racefuncexit",
					"runtime.panicdivide", "runtime.panicwrap",
					"runtime.panicshift":
					continue
				}
				// Any other static call blocks removal.
				return false
			case OpPanicBounds, OpPanicExtend:
				// Okay, can remove the race call.
			case OpClosureCall, OpInterCall, OpClosureLECall, OpInterLECall:
				// Indirect calls always block removal.
				return false
			}
		}
	}
	if isSameCall(sym, "runtime.racefuncenter") {
		// TODO REGISTER ABI this needs to be cleaned up.
		// If we're removing racefuncenter, remove its argument as well.
		if v.Args[0].Op != OpStore {
			if v.Op == OpStaticLECall {
				// LECall variants pass the argument in a register, so
				// there is no store to clean up.
				return true
			}
			return false
		}
		// Bypass the argument store: splice its memory straight through.
		mem := v.Args[0].Args[2]
		v.Args[0].reset(OpCopy)
		v.Args[0].AddArg(mem)
	}
	return true
}
2058
2059
// symIsRO reports whether sym is a read-only global with no relocations,
// i.e. its contents are fully known at compile time.
func symIsRO(sym Sym) bool {
	lsym := sym.(*obj.LSym)
	return lsym.Type == objabi.SRODATA && len(lsym.R) == 0
}
2064
2065
// symIsROZero reports whether sym is a read-only, relocation-free global
// whose contents are entirely zero bytes.
func symIsROZero(sym Sym) bool {
	lsym := sym.(*obj.LSym)
	if lsym.Type != objabi.SRODATA || len(lsym.R) != 0 {
		return false
	}
	for _, b := range lsym.P {
		if b != 0 {
			return false
		}
	}
	return true
}
2078
2079
2080
// isFixedLoad reports whether a load v from sym+off reads data that is
// fixed at compile time and can therefore be constant-folded by
// rewriteFixedLoad: either a pointer slot backed by an address relocation
// in a read-only symbol, or a known field of a runtime type descriptor.
func isFixedLoad(v *Value, sym Sym, off int64) bool {
	lsym := sym.(*obj.LSym)
	// Pointer-sized loads from read-only data: fixed if an address
	// relocation (with no addend) exists exactly at this offset.
	if (v.Type.IsPtrShaped() || v.Type.IsUintptr()) && lsym.Type == objabi.SRODATA {
		for _, r := range lsym.R {
			if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
				return true
			}
		}
		return false
	}

	// Loads from a runtime type descriptor: only specific abi.Type fields
	// are known to be reproducible from the compiler's own type data.
	if ti := lsym.TypeInfo(); ti != nil {

		t := ti.Type.(*types.Type)

		for _, f := range rttype.Type.Fields() {
			if f.Offset == off && copyCompatibleType(v.Type, f.Type) {
				switch f.Sym.Name {
				case "Size_", "PtrBytes", "Hash", "Kind_", "GCData":
					return true
				default:
					// Other fields are not (yet) materializable here.
					return false
				}
			}
		}

		// The Elem field of a pointer type's descriptor is also known.
		if t.IsPtr() && off == rttype.PtrType.OffsetOf("Elem") {
			return true
		}

		return false
	}

	return false
}
2120
2121
// rewriteFixedLoad rewrites a load v from sym+off (for which isFixedLoad
// returned true) into the equivalent constant or address value, in place.
// sb is the SB base value used for address results. Fatals if the data
// turns out not to be known, since isFixedLoad should have screened that.
func rewriteFixedLoad(v *Value, sym Sym, sb *Value, off int64) *Value {
	b := v.Block
	f := b.Func

	lsym := sym.(*obj.LSym)
	// Case 1: pointer slot in read-only data backed by an address
	// relocation — replace the load with the relocated symbol's address.
	if (v.Type.IsPtrShaped() || v.Type.IsUintptr()) && lsym.Type == objabi.SRODATA {
		for _, r := range lsym.R {
			if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
				if strings.HasPrefix(r.Sym.Name, "type:") {
					// Keep the linker informed that this type symbol is
					// reachable, since the original data reference is
					// being replaced by a direct address.
					reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
				} else if strings.HasPrefix(r.Sym.Name, "go:itab") {
					// Same for itab symbols.
					reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
				}
				v.reset(OpAddr)
				v.Aux = symToAux(r.Sym)
				v.AddArg(sb)
				return v
			}
		}
		base.Fatalf("fixedLoad data not known for %s:%d", sym, off)
	}

	// Case 2: field of a runtime type descriptor — materialize the field's
	// value from the compiler's own knowledge of the type.
	if ti := lsym.TypeInfo(); ti != nil {

		t := ti.Type.(*types.Type)

		// Size_/PtrBytes are uintptr-sized; pick the matching const op.
		ptrSizedOpConst := OpConst64
		if f.Config.PtrSize == 4 {
			ptrSizedOpConst = OpConst32
		}

		for _, f := range rttype.Type.Fields() {
			if f.Offset == off && copyCompatibleType(v.Type, f.Type) {
				switch f.Sym.Name {
				case "Size_":
					v.reset(ptrSizedOpConst)
					v.AuxInt = t.Size()
					return v
				case "PtrBytes":
					v.reset(ptrSizedOpConst)
					v.AuxInt = types.PtrDataSize(t)
					return v
				case "Hash":
					v.reset(OpConst32)
					v.AuxInt = int64(int32(types.TypeHash(t)))
					return v
				case "Kind_":
					v.reset(OpConst8)
					v.AuxInt = int64(int8(reflectdata.ABIKindOfType(t)))
					return v
				case "GCData":
					gcdata, _ := reflectdata.GCSym(t, true)
					v.reset(OpAddr)
					v.Aux = symToAux(gcdata)
					v.AddArg(sb)
					return v
				default:
					base.Fatalf("unknown field %s for fixedLoad of %s at offset %d", f.Sym.Name, lsym.Name, off)
				}
			}
		}

		// Elem field of a pointer type's descriptor: the element type's
		// own descriptor address.
		if t.IsPtr() && off == rttype.PtrType.OffsetOf("Elem") {
			elemSym := reflectdata.TypeLinksym(t.Elem())
			reflectdata.MarkTypeSymUsedInInterface(elemSym, f.fe.Func().Linksym())
			v.reset(OpAddr)
			v.Aux = symToAux(elemSym)
			v.AddArg(sb)
			return v
		}

		base.Fatalf("fixedLoad data not known for %s:%d", sym, off)
	}

	base.Fatalf("fixedLoad data not known for %s:%d", sym, off)
	return nil
}
2209
2210
2211 func read8(sym Sym, off int64) uint8 {
2212 lsym := sym.(*obj.LSym)
2213 if off >= int64(len(lsym.P)) || off < 0 {
2214
2215
2216
2217
2218 return 0
2219 }
2220 return lsym.P[off]
2221 }
2222
2223
2224 func read16(sym Sym, off int64, byteorder binary.ByteOrder) uint16 {
2225 lsym := sym.(*obj.LSym)
2226
2227
2228 var src []byte
2229 if 0 <= off && off < int64(len(lsym.P)) {
2230 src = lsym.P[off:]
2231 }
2232 buf := make([]byte, 2)
2233 copy(buf, src)
2234 return byteorder.Uint16(buf)
2235 }
2236
2237
2238 func read32(sym Sym, off int64, byteorder binary.ByteOrder) uint32 {
2239 lsym := sym.(*obj.LSym)
2240 var src []byte
2241 if 0 <= off && off < int64(len(lsym.P)) {
2242 src = lsym.P[off:]
2243 }
2244 buf := make([]byte, 4)
2245 copy(buf, src)
2246 return byteorder.Uint32(buf)
2247 }
2248
2249
2250 func read64(sym Sym, off int64, byteorder binary.ByteOrder) uint64 {
2251 lsym := sym.(*obj.LSym)
2252 var src []byte
2253 if 0 <= off && off < int64(len(lsym.P)) {
2254 src = lsym.P[off:]
2255 }
2256 buf := make([]byte, 8)
2257 copy(buf, src)
2258 return byteorder.Uint64(buf)
2259 }
2260
2261
// sequentialAddresses reports whether x and y are provably addresses with
// y == x + n, by pattern-matching the x86 ADD/LEA1 address forms (the
// commutative argument orders are both checked).
func sequentialAddresses(x, y *Value, n int64) bool {
	if x == y && n == 0 {
		return true
	}
	// 386: y = LEAL1 [n] {nil} of the same two addends as x = ADDL.
	if x.Op == Op386ADDL && y.Op == Op386LEAL1 && y.AuxInt == n && y.Aux == nil &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	// 386: both LEAL1 with equal sym and offsets differing by exactly n.
	if x.Op == Op386LEAL1 && y.Op == Op386LEAL1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	// amd64: same two patterns with ADDQ/LEAQ1.
	if x.Op == OpAMD64ADDQ && y.Op == OpAMD64LEAQ1 && y.AuxInt == n && y.Aux == nil &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	if x.Op == OpAMD64LEAQ1 && y.Op == OpAMD64LEAQ1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	return false
}
2288
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298
2299
2300
// flagConstant is a compact encoding of a known flags state, with the
// N/Z/C/V condition bits packed into the low nibble (see the accessors
// below and flagConstantBuilder.encode).
type flagConstant uint8
2302
2303
// N reports whether the negative flag (bit 0) is set.
func (fc flagConstant) N() bool {
	return fc&1 != 0
}

// Z reports whether the zero flag (bit 1) is set.
func (fc flagConstant) Z() bool {
	return fc&2 != 0
}

// C reports whether the carry flag (bit 2) is set.
func (fc flagConstant) C() bool {
	return fc&4 != 0
}

// V reports whether the overflow flag (bit 3) is set.
func (fc flagConstant) V() bool {
	return fc&8 != 0
}
2323
// eq: equal (Z set).
func (fc flagConstant) eq() bool {
	return fc.Z()
}

// ne: not equal (Z clear).
func (fc flagConstant) ne() bool {
	return !fc.Z()
}

// lt: signed less-than (N != V).
func (fc flagConstant) lt() bool {
	return fc.N() != fc.V()
}

// le: signed less-or-equal.
func (fc flagConstant) le() bool {
	return fc.Z() || fc.lt()
}

// gt: signed greater-than.
func (fc flagConstant) gt() bool {
	return !fc.Z() && fc.ge()
}

// ge: signed greater-or-equal (N == V).
func (fc flagConstant) ge() bool {
	return fc.N() == fc.V()
}

// ult: unsigned less-than (C clear; carry means "no borrow").
func (fc flagConstant) ult() bool {
	return !fc.C()
}

// ule: unsigned less-or-equal.
func (fc flagConstant) ule() bool {
	return fc.Z() || fc.ult()
}

// ugt: unsigned greater-than.
func (fc flagConstant) ugt() bool {
	return !fc.Z() && fc.uge()
}

// uge: unsigned greater-or-equal (C set).
func (fc flagConstant) uge() bool {
	return fc.C()
}
2354
// ltNoov: signed less-than with no overflow having occurred.
func (fc flagConstant) ltNoov() bool {
	return fc.lt() && !fc.V()
}

// leNoov: signed less-or-equal with no overflow having occurred.
func (fc flagConstant) leNoov() bool {
	return fc.le() && !fc.V()
}

// gtNoov: signed greater-than with no overflow having occurred.
func (fc flagConstant) gtNoov() bool {
	return fc.gt() && !fc.V()
}

// geNoov: signed greater-or-equal with no overflow having occurred.
func (fc flagConstant) geNoov() bool {
	return fc.ge() && !fc.V()
}

// String renders the four flag bits for debugging.
func (fc flagConstant) String() string {
	return fmt.Sprintf("N=%v,Z=%v,C=%v,V=%v", fc.N(), fc.Z(), fc.C(), fc.V())
}
2371
// flagConstantBuilder collects the four condition flags as booleans before
// packing them into a flagConstant via encode.
type flagConstantBuilder struct {
	N bool // negative
	Z bool // zero
	C bool // carry
	V bool // overflow
}
2378
2379 func (fcs flagConstantBuilder) encode() flagConstant {
2380 var fc flagConstant
2381 if fcs.N {
2382 fc |= 1
2383 }
2384 if fcs.Z {
2385 fc |= 2
2386 }
2387 if fcs.C {
2388 fc |= 4
2389 }
2390 if fcs.V {
2391 fc |= 8
2392 }
2393 return fc
2394 }
2395
2396
2397
2398
2399
2400
2401 func addFlags64(x, y int64) flagConstant {
2402 var fcb flagConstantBuilder
2403 fcb.Z = x+y == 0
2404 fcb.N = x+y < 0
2405 fcb.C = uint64(x+y) < uint64(x)
2406 fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
2407 return fcb.encode()
2408 }
2409
2410
2411 func subFlags64(x, y int64) flagConstant {
2412 var fcb flagConstantBuilder
2413 fcb.Z = x-y == 0
2414 fcb.N = x-y < 0
2415 fcb.C = uint64(y) <= uint64(x)
2416 fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
2417 return fcb.encode()
2418 }
2419
2420
2421 func addFlags32(x, y int32) flagConstant {
2422 var fcb flagConstantBuilder
2423 fcb.Z = x+y == 0
2424 fcb.N = x+y < 0
2425 fcb.C = uint32(x+y) < uint32(x)
2426 fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
2427 return fcb.encode()
2428 }
2429
2430
2431 func subFlags32(x, y int32) flagConstant {
2432 var fcb flagConstantBuilder
2433 fcb.Z = x-y == 0
2434 fcb.N = x-y < 0
2435 fcb.C = uint32(y) <= uint32(x)
2436 fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
2437 return fcb.encode()
2438 }
2439
2440
2441
2442 func logicFlags64(x int64) flagConstant {
2443 var fcb flagConstantBuilder
2444 fcb.Z = x == 0
2445 fcb.N = x < 0
2446 return fcb.encode()
2447 }
2448
2449
2450
2451 func logicFlags32(x int32) flagConstant {
2452 var fcb flagConstantBuilder
2453 fcb.Z = x == 0
2454 fcb.N = x < 0
2455 return fcb.encode()
2456 }
2457
// makeJumpTableSym creates the (static) symbol holding block b's jump
// table, named after the enclosing function and the block ID.
func makeJumpTableSym(b *Block) *obj.LSym {
	s := base.Ctxt.Lookup(fmt.Sprintf("%s.jump%d", b.Func.fe.Func().LSym.Name, b.ID))
	// Mark the symbol static so it is local to this object file.
	s.Set(obj.AttrStatic, true)
	return s
}
2464
2465
2466
2467 func canRotate(c *Config, bits int64) bool {
2468 if bits > c.PtrSize*8 {
2469
2470 return false
2471 }
2472 switch c.arch {
2473 case "386", "amd64", "arm64", "loong64", "riscv64":
2474 return true
2475 case "arm", "s390x", "ppc64", "ppc64le", "wasm":
2476 return bits >= 32
2477 default:
2478 return false
2479 }
2480 }
2481
2482
// isARM64bitcon reports whether x can be encoded as an ARM64 logical
// ("bitmask") immediate: a rotated run of ones replicated at a power-of-two
// period. All-zero and all-one values are not encodable.
func isARM64bitcon(x uint64) bool {
	if x == 1<<64-1 || x == 0 {
		return false
	}
	// Determine the repeat period by testing self-rotations, then reduce x
	// to one (sign-extended) period so the run check below suffices.
	switch {
	case x != x>>32|x<<32:
		// period is 64: x stands as-is

	case x != x>>16|x<<48:
		// period is 32
		x = uint64(int64(int32(x)))
	case x != x>>8|x<<56:
		// period is 16
		x = uint64(int64(int16(x)))
	case x != x>>4|x<<60:
		// period is 8
		x = uint64(int64(int8(x)))
	default:
		// period is 4 or 2: every such nonzero, non-all-ones pattern is a
		// rotated run of ones, so it is always encodable.
		return true
	}
	// Encodable iff the period is a contiguous run of ones, possibly
	// wrapping (in which case the complement is a contiguous run).
	return sequenceOfOnes(x) || sequenceOfOnes(^x)
}
2511
2512
// sequenceOfOnes reports whether x is a non-wrapping sequence of ones
// (zero trivially qualifies): adding the lowest set bit to x collapses a
// contiguous run, leaving at most one bit set.
func sequenceOfOnes(x uint64) bool {
	y := x + (x & -x)
	return y&(y-1) == 0
}
2518
2519
// isARM64addcon reports whether v can be encoded as an ARM64 ADD/SUB
// immediate: a non-negative 12-bit value, optionally shifted left by 12.
func isARM64addcon(v int64) bool {
	switch {
	case v < 0:
		return false
	case v&0xFFF == 0:
		// Low bits clear: encodable iff the value fits after a 12-bit shift.
		return v>>12 <= 0xFFF
	default:
		return v <= 0xFFF
	}
}
2530
2531
2532
2533
// setPos sets v's source position and returns true, so it can be chained
// as a condition inside rewrite rules.
func setPos(v *Value, pos src.XPos) bool {
	v.Pos = pos
	return true
}
2538
2539
2540
2541
// isNonNegative conservatively reports whether integer value v is known to
// be >= 0 when interpreted as signed. It recurses structurally through a
// few operations; a false result only means "unknown", not "negative".
func isNonNegative(v *Value) bool {
	if !v.Type.IsInteger() {
		v.Fatalf("isNonNegative bad type: %v", v.Type)
	}

	switch v.Op {
	case OpConst64:
		return v.AuxInt >= 0

	case OpConst32:
		return int32(v.AuxInt) >= 0

	case OpConst16:
		return int16(v.AuxInt) >= 0

	case OpConst8:
		return int8(v.AuxInt) >= 0

	// Lengths/capacities and ops whose result ranges are inherently
	// non-negative (zero extensions, count-trailing-zeros, bit lengths).
	case OpStringLen, OpSliceLen, OpSliceCap,
		OpZeroExt8to64, OpZeroExt16to64, OpZeroExt32to64,
		OpZeroExt8to32, OpZeroExt16to32, OpZeroExt8to16,
		OpCtz64, OpCtz32, OpCtz16, OpCtz8,
		OpCtz64NonZero, OpCtz32NonZero, OpCtz16NonZero, OpCtz8NonZero,
		OpBitLen64, OpBitLen32, OpBitLen16, OpBitLen8:
		return true

	// An unsigned right shift by a positive constant clears the sign bit.
	case OpRsh64Ux64, OpRsh32Ux64:
		by := v.Args[1]
		return by.Op == OpConst64 && by.AuxInt > 0

	// Signed shifts and sign extensions preserve the operand's sign.
	case OpRsh64x64, OpRsh32x64, OpRsh8x64, OpRsh16x64, OpRsh32x32, OpRsh64x32,
		OpSignExt32to64, OpSignExt16to64, OpSignExt8to64, OpSignExt16to32, OpSignExt8to32:
		return isNonNegative(v.Args[0])

	// AND is non-negative if either operand is (the sign bit is cleared).
	case OpAnd64, OpAnd32, OpAnd16, OpAnd8:
		return isNonNegative(v.Args[0]) || isNonNegative(v.Args[1])

	// These need both operands non-negative to guarantee a clear sign bit.
	case OpMod64, OpMod32, OpMod16, OpMod8,
		OpDiv64, OpDiv32, OpDiv16, OpDiv8,
		OpOr64, OpOr32, OpOr16, OpOr8,
		OpXor64, OpXor32, OpXor16, OpXor8:
		return isNonNegative(v.Args[0]) && isNonNegative(v.Args[1])

		// TODO: OpAdd, OpSub, and others could be handled with more
		// information about operand ranges.
	}
	return false
}
2593
// rewriteStructLoad decomposes a struct-typed Load into per-field Loads and
// rewrites v in place into a StructMake of those fields.
func rewriteStructLoad(v *Value) *Value {
	b := v.Block
	ptr := v.Args[0]
	mem := v.Args[1]

	t := v.Type
	args := make([]*Value, t.NumFields())
	for i := range args {
		ft := t.FieldType(i)
		// Load each field from its offset within the struct.
		addr := b.NewValue1I(v.Pos, OpOffPtr, ft.PtrTo(), t.FieldOff(i), ptr)
		args[i] = b.NewValue2(v.Pos, OpLoad, ft, addr, mem)
	}

	v.reset(OpStructMake)
	v.AddArgs(args...)
	return v
}
2611
// rewriteStructStore decomposes a Store of a StructMake into per-field
// Stores, returning the final memory value. The stored value must be a
// StructMake (enforced with Fatalf).
func rewriteStructStore(v *Value) *Value {
	b := v.Block
	dst := v.Args[0]
	x := v.Args[1]
	if x.Op != OpStructMake {
		base.Fatalf("invalid struct store: %v", x)
	}
	mem := v.Args[2]

	t := x.Type
	for i, arg := range x.Args {
		ft := t.FieldType(i)

		// Store each field at its offset, threading the memory chain.
		addr := b.NewValue1I(v.Pos, OpOffPtr, ft.PtrTo(), t.FieldOff(i), dst)
		mem = b.NewValue3A(v.Pos, OpStore, types.TypeMem, typeToAux(ft), addr, arg, mem)
	}

	return mem
}
2631
2632
2633
2634
// isDirectAndComparableType reports whether v (an ITab value) describes a
// statically-known type that is both direct-interface (stored inline in
// the interface word) and comparable.
func isDirectAndComparableType(v *Value) bool {
	return isDirectAndComparableType1(v)
}

// isDirectAndComparableType1 handles the ITab/Addr side of the pattern:
// an ITab of an IMake, or an Addr of a type-descriptor symbol whose type
// is known to the compiler.
func isDirectAndComparableType1(v *Value) bool {
	switch v.Op {
	case OpITab:
		return isDirectAndComparableType2(v.Args[0])
	case OpAddr:
		lsym := v.Aux.(*obj.LSym)
		if ti := lsym.TypeInfo(); ti != nil {
			t := ti.Type.(*types.Type)
			return types.IsDirectIface(t) && types.IsComparable(t)
		}
	}
	return false
}

// isDirectAndComparableType2 handles the IMake side, recursing back into
// its type-word argument.
func isDirectAndComparableType2(v *Value) bool {
	switch v.Op {
	case OpIMake:
		return isDirectAndComparableType1(v.Args[0])
	}
	return false
}
2662
2663
2664
2665
2666 func isDirectAndComparableIface(v *Value) bool {
2667 return isDirectAndComparableIface1(v, 9)
2668 }
2669
2670
2671 func isDirectAndComparableIface1(v *Value, depth int) bool {
2672 if depth == 0 {
2673 return false
2674 }
2675 switch v.Op {
2676 case OpITab:
2677 return isDirectAndComparableIface2(v.Args[0], depth-1)
2678 case OpAddr:
2679 lsym := v.Aux.(*obj.LSym)
2680 if ii := lsym.ItabInfo(); ii != nil {
2681 t := ii.Type.(*types.Type)
2682 return types.IsDirectIface(t) && types.IsComparable(t)
2683 }
2684 case OpConstNil:
2685
2686
2687 return true
2688 }
2689 return false
2690 }
2691
2692
2693 func isDirectAndComparableIface2(v *Value, depth int) bool {
2694 if depth == 0 {
2695 return false
2696 }
2697 switch v.Op {
2698 case OpIMake:
2699 return isDirectAndComparableIface1(v.Args[0], depth-1)
2700 case OpPhi:
2701 for _, a := range v.Args {
2702 if !isDirectAndComparableIface2(a, depth-1) {
2703 return false
2704 }
2705 }
2706 return true
2707 }
2708 return false
2709 }
2710
// bitsAdd64 wraps bits.Add64 for the rewrite rules, performing the
// addition in the unsigned domain and returning the sum and carry-out
// reinterpreted as int64.
func bitsAdd64(x, y, carry int64) (r struct{ sum, carry int64 }) {
	sum, carryOut := bits.Add64(uint64(x), uint64(y), uint64(carry))
	r.sum = int64(sum)
	r.carry = int64(carryOut)
	return r
}
2716
// bitsMulU64 wraps bits.Mul64: the full 128-bit unsigned product of x and
// y, with hi/lo halves reinterpreted as int64.
func bitsMulU64(x, y int64) (r struct{ hi, lo int64 }) {
	hiBits, loBits := bits.Mul64(uint64(x), uint64(y))
	r.hi = int64(hiBits)
	r.lo = int64(loBits)
	return r
}
// bitsMulU32 wraps bits.Mul32: the full 64-bit unsigned product of x and
// y, with hi/lo halves reinterpreted as int32.
func bitsMulU32(x, y int32) (r struct{ hi, lo int32 }) {
	hiBits, loBits := bits.Mul32(uint32(x), uint32(y))
	r.hi = int32(hiBits)
	r.lo = int32(loBits)
	return r
}
2727
2728
// flagify rewrites v in place into (Select0 (op-flags ...)), where
// op-flags is the flag-producing variant of v's op. Only the AMD64
// ADDQconst/ADDLconst ops are supported; anything else is a fatal error.
// Returns true so it can be used as a rewrite-rule condition.
func flagify(v *Value) bool {
	var flagVersion Op
	switch v.Op {
	case OpAMD64ADDQconst:
		flagVersion = OpAMD64ADDQconstflags
	case OpAMD64ADDLconst:
		flagVersion = OpAMD64ADDLconstflags
	default:
		base.Fatalf("can't flagify op %s", v.Op)
	}
	// Duplicate v as the flag-producing inner op, then turn v itself into
	// the Select0 projecting the value half of the tuple.
	inner := v.copyInto(v.Block)
	inner.Op = flagVersion
	inner.Type = types.NewTuple(v.Type, types.TypeFlags)
	v.reset(OpSelect0)
	v.AddArg(inner)
	return true
}
2746
2747
// PanicBoundsC is an aux value carrying one constant operand of a
// bounds-check panic.
type PanicBoundsC struct {
	C int64
}

// PanicBoundsCC is an aux value carrying two constant operands of a
// bounds-check panic.
type PanicBoundsCC struct {
	Cx int64
	Cy int64
}

// CanBeAnSSAAux marks PanicBoundsC as usable in a Value's Aux field.
func (p PanicBoundsC) CanBeAnSSAAux() {
}

// CanBeAnSSAAux marks PanicBoundsCC as usable in a Value's Aux field.
func (p PanicBoundsCC) CanBeAnSSAAux() {
}
2762
// auxToPanicBoundsC unwraps an Aux known to hold a PanicBoundsC.
func auxToPanicBoundsC(i Aux) PanicBoundsC {
	return i.(PanicBoundsC)
}

// auxToPanicBoundsCC unwraps an Aux known to hold a PanicBoundsCC.
func auxToPanicBoundsCC(i Aux) PanicBoundsCC {
	return i.(PanicBoundsCC)
}

// panicBoundsCToAux wraps a PanicBoundsC as an Aux.
func panicBoundsCToAux(p PanicBoundsC) Aux {
	return p
}

// panicBoundsCCToAux wraps a PanicBoundsCC as an Aux.
func panicBoundsCCToAux(p PanicBoundsCC) Aux {
	return p
}
2775
// isDictArgSym reports whether sym is the local dictionary argument of a
// generic function's shape instantiation.
func isDictArgSym(sym Sym) bool {
	return sym.(*ir.Name).Sym().Name == typecheck.LocalDictName
}
2779
2780
2781
2782
// imakeOfStructMake rewrites an IMake whose data word is a StructMake by
// using the struct's first nonzero-sized field directly as the data word,
// building a fresh IMake. Assumes the rewrite rules only invoke this when
// such a field carries the whole representation — confirm against the
// matching rule if modifying.
func imakeOfStructMake(v *Value) *Value {
	var arg *Value
	for _, a := range v.Args[1].Args {
		if a.Type.Size() > 0 {
			arg = a
			break
		}
	}
	return v.Block.NewValue2(v.Pos, OpIMake, v.Type, v.Args[0], arg)
}
2793
2794
// bool2int converts a bool to 1 (true) or 0 (false).
func bool2int(x bool) int {
	if x {
		return 1
	}
	return 0
}
2802