// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/rttype"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/s390x"
	"cmd/internal/objabi"
	"cmd/internal/src"
	"encoding/binary"
	"fmt"
	"internal/buildcfg"
	"io"
	"math"
	"math/bits"
	"os"
	"path/filepath"
	"strings"
)

type deadValueChoice bool

const (
	leaveDeadValues  deadValueChoice = false
	removeDeadValues                 = true

	repZeroThreshold = 1408
	repMoveThreshold = 1408
)

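// applyRewrite repeatedly applies the block rewriter rb and the value
// rewriter rv to the blocks and values of f until no further changes
// occur. deadcode indicates whether the pass should also remove values
// that become dead along the way. Statement boundaries on values that
// are rewritten away are saved in pendingLines and reattached to
// surviving values or blocks in a final cleanup pass.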
func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValueChoice) {
	pendingLines := f.cachedLineStarts
	pendingLines.clear()
	debug := f.pass.debug
	if debug > 1 {
		fmt.Printf("%s: rewriting for %s\n", f.pass.name, f.Name)
	}

	itersLimit := f.NumBlocks()
	if itersLimit < 20 {
		itersLimit = 20
	}
	var iters int
	var states map[string]bool
	for {
		change := false
		deadChange := false
		for _, b := range f.Blocks {
			var b0 *Block
			if debug > 1 {
				b0 = new(Block)
				*b0 = *b
				b0.Succs = append([]Edge{}, b.Succs...)
			}
			for i, c := range b.ControlValues() {
				for c.Op == OpCopy {
					c = c.Args[0]
					b.ReplaceControl(i, c)
				}
			}
			if rb(b) {
				change = true
				if debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", b0.LongString(), b.LongString())
				}
			}
			for j, v := range b.Values {
				var v0 *Value
				if debug > 1 {
					v0 = new(Value)
					*v0 = *v
					v0.Args = append([]*Value{}, v.Args...)
				}
				if v.Uses == 0 && v.removeable() {
					if v.Op != OpInvalid && deadcode == removeDeadValues {
						v.reset(OpInvalid)
						deadChange = true
					}
					continue
				}

				vchange := phielimValue(v)
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				for i, a := range v.Args {
					if a.Op != OpCopy {
						continue
					}
					aa := copySource(a)
					v.SetArg(i, aa)
					if a.Pos.IsStmt() == src.PosIsStmt {
						if aa.Block == a.Block && aa.Pos.Line() == a.Pos.Line() && aa.Pos.IsStmt() != src.PosNotStmt {
							aa.Pos = aa.Pos.WithIsStmt()
						} else if v.Block == a.Block && v.Pos.Line() == a.Pos.Line() && v.Pos.IsStmt() != src.PosNotStmt {
							v.Pos = v.Pos.WithIsStmt()
						} else {
							pendingLines.set(a.Pos, int32(a.Block.ID))
						}
						a.Pos = a.Pos.WithNotStmt()
					}
					vchange = true
					for a.Uses == 0 {
						b := a.Args[0]
						a.reset(OpInvalid)
						a = b
					}
				}
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				if rv(v) {
					vchange = true
					if v.Pos.IsStmt() == src.PosIsStmt {
						if k := nextGoodStatementIndex(v, j, b); k != j {
							v.Pos = v.Pos.WithNotStmt()
							b.Values[k].Pos = b.Values[k].Pos.WithIsStmt()
						}
					}
				}

				change = change || vchange
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}
			}
		}
		if !change && !deadChange {
			break
		}
		iters++
		if (iters > itersLimit || debug >= 2) && change {
			if states == nil {
				states = make(map[string]bool)
			}
			h := f.rewriteHash()
			if _, ok := states[h]; ok {
				if debug < 2 {
					debug = 2
					states = make(map[string]bool)
				} else {
					f.Fatalf("rewrite cycle detected")
				}
			}
			states[h] = true
		}
	}

	for _, b := range f.Blocks {
		j := 0
		for i, v := range b.Values {
			vl := v.Pos
			if v.Op == OpInvalid {
				if v.Pos.IsStmt() == src.PosIsStmt {
					pendingLines.set(vl, int32(b.ID))
				}
				f.freeValue(v)
				continue
			}
			if v.Pos.IsStmt() != src.PosNotStmt && !notStmtBoundary(v.Op) {
				if pl, ok := pendingLines.get(vl); ok && pl == int32(b.ID) {
					pendingLines.remove(vl)
					v.Pos = v.Pos.WithIsStmt()
				}
			}
			if i != j {
				b.Values[j] = v
			}
			j++
		}
		if pl, ok := pendingLines.get(b.Pos); ok && pl == int32(b.ID) {
			b.Pos = b.Pos.WithIsStmt()
			pendingLines.remove(b.Pos)
		}
		b.truncateValues(j)
	}
}

func is64BitFloat(t *types.Type) bool {
	return t.Size() == 8 && t.IsFloat()
}

func is32BitFloat(t *types.Type) bool {
	return t.Size() == 4 && t.IsFloat()
}

func is64BitInt(t *types.Type) bool {
	return t.Size() == 8 && t.IsInteger()
}

func is32BitInt(t *types.Type) bool {
	return t.Size() == 4 && t.IsInteger()
}

func is16BitInt(t *types.Type) bool {
	return t.Size() == 2 && t.IsInteger()
}

func is8BitInt(t *types.Type) bool {
	return t.Size() == 1 && t.IsInteger()
}

func isPtr(t *types.Type) bool {
	return t.IsPtrShaped()
}

func copyCompatibleType(t1, t2 *types.Type) bool {
	if t1.Size() != t2.Size() {
		return false
	}
	if t1.IsInteger() {
		return t2.IsInteger()
	}
	if isPtr(t1) {
		return isPtr(t2)
	}
	return t1.Compare(t2) == types.CMPeq
}

func mergeSym(x, y Sym) Sym {
	if x == nil {
		return y
	}
	if y == nil {
		return x
	}
	panic(fmt.Sprintf("mergeSym with two non-nil syms %v %v", x, y))
}

func canMergeSym(x, y Sym) bool {
	return x == nil || y == nil
}

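// canMergeLoadClobber reports whether the load can be merged into target
// without invalidating the schedule. It also checks that the other
// non-load argument x is something we are ok with clobbering.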
func canMergeLoadClobber(target, load, x *Value) bool {
	switch {
	case x.Uses == 2 && x.Op == OpPhi && len(x.Args) == 2 && (x.Args[0] == target || x.Args[1] == target) && target.Uses == 1:
	case x.Uses > 1:
		return false
	}
	loopnest := x.Block.Func.loopnest()
	if loopnest.depth(target.Block.ID) > loopnest.depth(x.Block.ID) {
		return false
	}
	return canMergeLoad(target, load)
}

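// canMergeLoad reports whether the load can be merged into target without
// invalidating the schedule.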
func canMergeLoad(target, load *Value) bool {
	if target.Block.ID != load.Block.ID {
		return false
	}

	if load.Uses != 1 {
		return false
	}

	mem := load.MemoryArg()

	var args []*Value
	for _, a := range target.Args {
		if a != load && a.Block.ID == target.Block.ID {
			args = append(args, a)
		}
	}

	var memPreds map[*Value]bool
	for i := 0; len(args) > 0; i++ {
		const limit = 100
		if i >= limit {
			return false
		}
		v := args[len(args)-1]
		args = args[:len(args)-1]
		if target.Block.ID != v.Block.ID {
			continue
		}
		if v.Op == OpPhi {
			continue
		}
		if v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
			return false
		}
		if v.Op.SymEffect()&SymAddr != 0 {
			return false
		}
		if v.Type.IsMemory() {
			if memPreds == nil {
				memPreds = make(map[*Value]bool)
				m := mem
				const limit = 50
				for i := 0; i < limit; i++ {
					if m.Op == OpPhi {
						break
					}
					if m.Block.ID != target.Block.ID {
						break
					}
					if !m.Type.IsMemory() {
						break
					}
					memPreds[m] = true
					if len(m.Args) == 0 {
						break
					}
					m = m.MemoryArg()
				}
			}
			if memPreds[v] {
				continue
			}
			return false
		}
		if len(v.Args) > 0 && v.Args[len(v.Args)-1] == mem {
			continue
		}
		for _, a := range v.Args {
			if target.Block.ID == a.Block.ID {
				args = append(args, a)
			}
		}
	}

	return true
}

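// isSameCall reports whether aux is a call to the function with the
// given name.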
func isSameCall(aux Aux, name string) bool {
	fn := aux.(*AuxCall).Fn
	return fn != nil && fn.String() == name
}

func isMalloc(aux Aux) bool {
	return isNewObject(aux) || isSpecializedMalloc(aux)
}

func isNewObject(aux Aux) bool {
	fn := aux.(*AuxCall).Fn
	return fn != nil && fn.String() == "runtime.newobject"
}

func isSpecializedMalloc(aux Aux) bool {
	fn := aux.(*AuxCall).Fn
	if fn == nil {
		return false
	}
	name := fn.String()
	return strings.HasPrefix(name, "runtime.mallocgcSmallNoScanSC") ||
		strings.HasPrefix(name, "runtime.mallocgcSmallScanNoHeaderSC") ||
		strings.HasPrefix(name, "runtime.mallocTiny")
}

func canLoadUnaligned(c *Config) bool {
	return c.ctxt.Arch.Alignment == 1
}

func nlz64(x int64) int { return bits.LeadingZeros64(uint64(x)) }
func nlz32(x int32) int { return bits.LeadingZeros32(uint32(x)) }
func nlz16(x int16) int { return bits.LeadingZeros16(uint16(x)) }
func nlz8(x int8) int   { return bits.LeadingZeros8(uint8(x)) }

func ntz64(x int64) int { return bits.TrailingZeros64(uint64(x)) }
func ntz32(x int32) int { return bits.TrailingZeros32(uint32(x)) }
func ntz16(x int16) int { return bits.TrailingZeros16(uint16(x)) }
func ntz8(x int8) int   { return bits.TrailingZeros8(uint8(x)) }

func oneBit[T int8 | int16 | int32 | int64](x T) bool {
	return x&(x-1) == 0 && x != 0
}

func nto(x int64) int64 {
	return int64(ntz64(^x))
}

func log8(n int8) int64   { return log8u(uint8(n)) }
func log16(n int16) int64 { return log16u(uint16(n)) }
func log32(n int32) int64 { return log32u(uint32(n)) }
func log64(n int64) int64 { return log64u(uint64(n)) }

func log8u(n uint8) int64   { return int64(bits.Len8(n)) - 1 }
func log16u(n uint16) int64 { return int64(bits.Len16(n)) - 1 }
func log32u(n uint32) int64 { return int64(bits.Len32(n)) - 1 }
func log64u(n uint64) int64 { return int64(bits.Len64(n)) - 1 }

func isPowerOfTwo[T int8 | int16 | int32 | int64](n T) bool {
	return n > 0 && n&(n-1) == 0
}

func isUnsignedPowerOfTwo[T uint8 | uint16 | uint32 | uint64](n T) bool {
	return n != 0 && n&(n-1) == 0
}

func is32Bit(n int64) bool {
	return n == int64(int32(n))
}

func is16Bit(n int64) bool {
	return n == int64(int16(n))
}

func is8Bit(n int64) bool {
	return n == int64(int8(n))
}

func isU8Bit(n int64) bool {
	return n == int64(uint8(n))
}

func is12Bit(n int64) bool {
	return -(1<<11) <= n && n < (1<<11)
}

func isU12Bit(n int64) bool {
	return 0 <= n && n < (1<<12)
}

func isU16Bit(n int64) bool {
	return n == int64(uint16(n))
}

func isU32Bit(n int64) bool {
	return n == int64(uint32(n))
}

func is20Bit(n int64) bool {
	return -(1<<19) <= n && n < (1<<19)
}

func b2i(b bool) int64 {
	if b {
		return 1
	}
	return 0
}

func b2i32(b bool) int32 {
	if b {
		return 1
	}
	return 0
}

func canMulStrengthReduce(config *Config, x int64) bool {
	_, ok := config.mulRecipes[x]
	return ok
}
func canMulStrengthReduce32(config *Config, x int32) bool {
	_, ok := config.mulRecipes[int64(x)]
	return ok
}

func mulStrengthReduce(m *Value, v *Value, x int64) *Value {
	return v.Block.Func.Config.mulRecipes[x].build(m, v)
}

func mulStrengthReduce32(m *Value, v *Value, x int32) *Value {
	return v.Block.Func.Config.mulRecipes[int64(x)].build(m, v)
}

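// shiftIsBounded reports whether (left/right) shift Value v is known to
// be bounded, as recorded in its AuxInt when the shift was generated.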
func shiftIsBounded(v *Value) bool {
	return v.AuxInt != 0
}

func canonLessThan(x, y *Value) bool {
	if x.Op != y.Op {
		return x.Op < y.Op
	}
	if !x.Pos.SameFileAndLine(y.Pos) {
		return x.Pos.Before(y.Pos)
	}
	return x.ID < y.ID
}

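// truncate64Fto32F converts a float64 to a float32, panicking if the
// truncation is not exact. For NaN inputs the sign bit and the top 23
// mantissa bits are copied over bit by bit so the payload is preserved.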
func truncate64Fto32F(f float64) float32 {
	if !isExactFloat32(f) {
		panic("truncate64Fto32F: truncation is not exact")
	}
	if !math.IsNaN(f) {
		return float32(f)
	}

	b := math.Float64bits(f)
	m := b & ((1 << 52) - 1)
	r := uint32(((b >> 32) & (1 << 31)) | 0x7f800000 | (m >> (52 - 23)))
	return math.Float32frombits(r)
}

func DivisionNeedsFixUp(v *Value) bool {
	return v.AuxInt == 0
}

func auxTo32F(i int64) float32 {
	return truncate64Fto32F(math.Float64frombits(uint64(i)))
}

func auxIntToBool(i int64) bool {
	if i == 0 {
		return false
	}
	return true
}
func auxIntToInt8(i int64) int8 {
	return int8(i)
}
func auxIntToInt16(i int64) int16 {
	return int16(i)
}
func auxIntToInt32(i int64) int32 {
	return int32(i)
}
func auxIntToInt64(i int64) int64 {
	return i
}
func auxIntToUint8(i int64) uint8 {
	return uint8(i)
}
func auxIntToFloat32(i int64) float32 {
	return float32(math.Float64frombits(uint64(i)))
}
func auxIntToFloat64(i int64) float64 {
	return math.Float64frombits(uint64(i))
}
func auxIntToValAndOff(i int64) ValAndOff {
	return ValAndOff(i)
}
func auxIntToArm64BitField(i int64) arm64BitField {
	return arm64BitField(i)
}
func auxIntToArm64ConditionalParams(i int64) arm64ConditionalParams {
	var params arm64ConditionalParams
	params.cond = Op(i & 0xffff)
	i >>= 16
	params.nzcv = uint8(i & 0x0f)
	i >>= 4
	params.constValue = uint8(i & 0x1f)
	i >>= 5
	params.ind = i == 1
	return params
}
func auxIntToFlagConstant(x int64) flagConstant {
	return flagConstant(x)
}

func auxIntToOp(cc int64) Op {
	return Op(cc)
}

func boolToAuxInt(b bool) int64 {
	if b {
		return 1
	}
	return 0
}
func int8ToAuxInt(i int8) int64 {
	return int64(i)
}
func int16ToAuxInt(i int16) int64 {
	return int64(i)
}
func int32ToAuxInt(i int32) int64 {
	return int64(i)
}
func int64ToAuxInt(i int64) int64 {
	return i
}
func uint8ToAuxInt(i uint8) int64 {
	return int64(int8(i))
}
func float32ToAuxInt(f float32) int64 {
	return int64(math.Float64bits(float64(f)))
}
func float64ToAuxInt(f float64) int64 {
	return int64(math.Float64bits(f))
}
func valAndOffToAuxInt(v ValAndOff) int64 {
	return int64(v)
}
func arm64BitFieldToAuxInt(v arm64BitField) int64 {
	return int64(v)
}
func arm64ConditionalParamsToAuxInt(v arm64ConditionalParams) int64 {
	if v.cond&^0xffff != 0 {
		panic("condition value exceeds 16 bits")
	}

	var i int64
	if v.ind {
		i = 1 << 25
	}
	i |= int64(v.constValue) << 20
	i |= int64(v.nzcv) << 16
	i |= int64(v.cond)
	return i
}
func flagConstantToAuxInt(x flagConstant) int64 {
	return int64(x)
}

func opToAuxInt(o Op) int64 {
	return int64(o)
}

type Aux interface {
	CanBeAnSSAAux()
}

type auxMark bool

func (auxMark) CanBeAnSSAAux() {}

var AuxMark auxMark

type stringAux string

func (stringAux) CanBeAnSSAAux() {}

func auxToString(i Aux) string {
	return string(i.(stringAux))
}
func auxToSym(i Aux) Sym {
	s, _ := i.(Sym)
	return s
}
func auxToType(i Aux) *types.Type {
	return i.(*types.Type)
}
func auxToCall(i Aux) *AuxCall {
	return i.(*AuxCall)
}
func auxToS390xCCMask(i Aux) s390x.CCMask {
	return i.(s390x.CCMask)
}
func auxToS390xRotateParams(i Aux) s390x.RotateParams {
	return i.(s390x.RotateParams)
}

func StringToAux(s string) Aux {
	return stringAux(s)
}
func symToAux(s Sym) Aux {
	return s
}
func callToAux(s *AuxCall) Aux {
	return s
}
func typeToAux(t *types.Type) Aux {
	return t
}
func s390xCCMaskToAux(c s390x.CCMask) Aux {
	return c
}
func s390xRotateParamsToAux(r s390x.RotateParams) Aux {
	return r
}

func uaddOvf(a, b int64) bool {
	return uint64(a)+uint64(b) < uint64(a)
}

func devirtLECall(v *Value, sym *obj.LSym) *Value {
	v.Op = OpStaticLECall
	auxcall := v.Aux.(*AuxCall)
	auxcall.Fn = sym
	v.Args[0].Uses--
	copy(v.Args[0:], v.Args[1:])
	v.Args[len(v.Args)-1] = nil
	v.Args = v.Args[:len(v.Args)-1]
	if f := v.Block.Func; f.pass.debug > 0 {
		f.Warnl(v.Pos, "de-virtualizing call")
	}
	return v
}

func isSamePtr(p1, p2 *Value) bool {
	if p1 == p2 {
		return true
	}
	if p1.Op != p2.Op {
		for p1.Op == OpOffPtr && p1.AuxInt == 0 {
			p1 = p1.Args[0]
		}
		for p2.Op == OpOffPtr && p2.AuxInt == 0 {
			p2 = p2.Args[0]
		}
		if p1 == p2 {
			return true
		}
		if p1.Op != p2.Op {
			return false
		}
	}
	switch p1.Op {
	case OpOffPtr:
		return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
	case OpAddr, OpLocalAddr:
		return p1.Aux == p2.Aux
	case OpAddPtr:
		return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
	}
	return false
}

func isStackPtr(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr {
		v = v.Args[0]
	}
	return v.Op == OpSP || v.Op == OpLocalAddr
}

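// disjoint reports whether the memory regions [p1, p1+n1) and
// [p2, p2+n2) are known not to overlap. A false result does not mean the
// regions overlap, only that overlap could not be ruled out.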
func disjoint(p1 *Value, n1 int64, p2 *Value, n2 int64) bool {
	if n1 == 0 || n2 == 0 {
		return true
	}
	if p1 == p2 {
		return false
	}
	baseAndOffset := func(ptr *Value) (base *Value, offset int64) {
		base, offset = ptr, 0
		for base.Op == OpOffPtr {
			offset += base.AuxInt
			base = base.Args[0]
		}
		if opcodeTable[base.Op].nilCheck {
			base = base.Args[0]
		}
		return base, offset
	}

	if disjointTypes(p1.Type, p2.Type) {
		return true
	}

	p1, off1 := baseAndOffset(p1)
	p2, off2 := baseAndOffset(p2)
	if isSamePtr(p1, p2) {
		return !overlap(off1, n1, off2, n2)
	}

	switch p1.Op {
	case OpAddr, OpLocalAddr:
		if p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpSP {
			return true
		}
		return (p2.Op == OpArg || p2.Op == OpArgIntReg) && p1.Args[0].Op == OpSP
	case OpArg, OpArgIntReg:
		if p2.Op == OpSP || p2.Op == OpLocalAddr {
			return true
		}
	case OpSP:
		return p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpArg || p2.Op == OpArgIntReg || p2.Op == OpSP
	}
	return false
}

func disjointTypes(t1 *types.Type, t2 *types.Type) bool {
	if t1.IsUnsafePtr() || t2.IsUnsafePtr() {
		return false
	}

	if !t1.IsPtr() || !t2.IsPtr() {
		panic("disjointTypes: one of arguments is not a pointer")
	}

	t1 = t1.Elem()
	t2 = t2.Elem()

	if t1.NotInHeap() || t2.NotInHeap() {
		return false
	}

	isPtrShaped := func(t *types.Type) bool { return int(t.Size()) == types.PtrSize && t.HasPointers() }

	if (isPtrShaped(t1) && !t2.HasPointers()) ||
		(isPtrShaped(t2) && !t1.HasPointers()) {
		return true
	}

	return false
}

func moveSize(align int64, c *Config) int64 {
	switch {
	case align%8 == 0 && c.PtrSize == 8:
		return 8
	case align%4 == 0:
		return 4
	case align%2 == 0:
		return 2
	}
	return 1
}

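// mergePoint finds a block among a's blocks which dominates b and is
// itself dominated by all of a's blocks. Returns nil if it can't find
// one. Might return nil even if one does exist.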
func mergePoint(b *Block, a ...*Value) *Block {
	d := 100

	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				goto found
			}
		}
		if len(b.Preds) > 1 {
			return nil
		}
		b = b.Preds[0].b
		d--
	}
	return nil
found:
	r := b

	na := 0
	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				na++
			}
		}
		if na == len(a) {
			return r
		}
		if len(b.Preds) > 1 {
			return nil
		}
		b = b.Preds[0].b
		d--
	}
	return nil
}

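// clobber resets each value to OpInvalid (releasing its arguments) and
// returns true, so it can be used inside the boolean conditions of
// rewrite rules to make sure the values are really dead and never used
// again.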
func clobber(vv ...*Value) bool {
	for _, v := range vv {
		v.reset(OpInvalid)
	}
	return true
}

func resetCopy(v *Value, arg *Value) bool {
	v.reset(OpCopy)
	v.AddArg(arg)
	return true
}

func clobberIfDead(v *Value) bool {
	if v.Uses == 1 {
		v.reset(OpInvalid)
	}
	return true
}

func noteRule(s string) bool {
	fmt.Println(s)
	return true
}

func countRule(v *Value, key string) bool {
	f := v.Block.Func
	if f.ruleMatches == nil {
		f.ruleMatches = make(map[string]int)
	}
	f.ruleMatches[key]++
	return true
}

func warnRule(cond bool, v *Value, s string) bool {
	if pos := v.Pos; pos.Line() > 1 && cond {
		v.Block.Func.Warnl(pos, s)
	}
	return true
}

func flagArg(v *Value) *Value {
	if len(v.Args) != 1 || !v.Args[0].Type.IsFlags() {
		return nil
	}
	return v.Args[0]
}

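// arm64Negate finds the complement to an ARM64 condition code,
// for example !Equal -> NotEqual or !LessThan -> GreaterEqual.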
func arm64Negate(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterEqual
	case OpARM64LessThanU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterThan:
		return OpARM64LessEqual
	case OpARM64GreaterThanU:
		return OpARM64LessEqualU
	case OpARM64LessEqual:
		return OpARM64GreaterThan
	case OpARM64LessEqualU:
		return OpARM64GreaterThanU
	case OpARM64GreaterEqual:
		return OpARM64LessThan
	case OpARM64GreaterEqualU:
		return OpARM64LessThanU
	case OpARM64Equal:
		return OpARM64NotEqual
	case OpARM64NotEqual:
		return OpARM64Equal
	case OpARM64LessThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64NotLessEqualF
	case OpARM64NotLessEqualF:
		return OpARM64LessEqualF
	case OpARM64GreaterThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64GreaterEqualF
	default:
		panic("unreachable")
	}
}

func arm64Invert(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterThan
	case OpARM64LessThanU:
		return OpARM64GreaterThanU
	case OpARM64GreaterThan:
		return OpARM64LessThan
	case OpARM64GreaterThanU:
		return OpARM64LessThanU
	case OpARM64LessEqual:
		return OpARM64GreaterEqual
	case OpARM64LessEqualU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterEqual:
		return OpARM64LessEqual
	case OpARM64GreaterEqualU:
		return OpARM64LessEqualU
	case OpARM64Equal, OpARM64NotEqual:
		return op
	case OpARM64LessThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64GreaterEqualF
	case OpARM64GreaterEqualF:
		return OpARM64LessEqualF
	case OpARM64NotLessThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64NotLessEqualF
	default:
		panic("unreachable")
	}
}

func ccARM64Eval(op Op, flags *Value) int {
	fop := flags.Op
	if fop == OpARM64InvertFlags {
		return -ccARM64Eval(op, flags.Args[0])
	}
	if fop != OpARM64FlagConstant {
		return 0
	}
	fc := flagConstant(flags.AuxInt)
	b2i := func(b bool) int {
		if b {
			return 1
		}
		return -1
	}
	switch op {
	case OpARM64Equal:
		return b2i(fc.eq())
	case OpARM64NotEqual:
		return b2i(fc.ne())
	case OpARM64LessThan:
		return b2i(fc.lt())
	case OpARM64LessThanU:
		return b2i(fc.ult())
	case OpARM64GreaterThan:
		return b2i(fc.gt())
	case OpARM64GreaterThanU:
		return b2i(fc.ugt())
	case OpARM64LessEqual:
		return b2i(fc.le())
	case OpARM64LessEqualU:
		return b2i(fc.ule())
	case OpARM64GreaterEqual:
		return b2i(fc.ge())
	case OpARM64GreaterEqualU:
		return b2i(fc.uge())
	}
	return 0
}

func logRule(s string) {
	if ruleFile == nil {
		w, err := os.OpenFile(filepath.Join(os.Getenv("GOROOT"), "src", "rulelog"),
			os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
		if err != nil {
			panic(err)
		}
		ruleFile = w
	}
	_, err := fmt.Fprintln(ruleFile, s)
	if err != nil {
		panic(err)
	}
}

var ruleFile io.Writer

func isConstZero(v *Value) bool {
	switch v.Op {
	case OpConstNil:
		return true
	case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConst32F, OpConst64F:
		return v.AuxInt == 0
	case OpStringMake, OpIMake, OpComplexMake:
		return isConstZero(v.Args[0]) && isConstZero(v.Args[1])
	case OpSliceMake:
		return isConstZero(v.Args[0]) && isConstZero(v.Args[1]) && isConstZero(v.Args[2])
	case OpStringPtr, OpStringLen, OpSlicePtr, OpSliceLen, OpSliceCap, OpITab, OpIData, OpComplexReal, OpComplexImag:
		return isConstZero(v.Args[0])
	}
	return false
}

func reciprocalExact64(c float64) bool {
	b := math.Float64bits(c)
	man := b & (1<<52 - 1)
	if man != 0 {
		return false
	}
	exp := b >> 52 & (1<<11 - 1)

	switch exp {
	case 0:
		return false
	case 0x7ff:
		return false
	case 0x7fe:
		return false
	default:
		return true
	}
}

func reciprocalExact32(c float32) bool {
	b := math.Float32bits(c)
	man := b & (1<<23 - 1)
	if man != 0 {
		return false
	}
	exp := b >> 23 & (1<<8 - 1)

	switch exp {
	case 0:
		return false
	case 0xff:
		return false
	case 0xfe:
		return false
	default:
		return true
	}
}

func isARMImmRot(v uint32) bool {
	for i := 0; i < 16; i++ {
		if v&^0xff == 0 {
			return true
		}
		v = v<<2 | v>>30
	}

	return false
}

func overlap(offset1, size1, offset2, size2 int64) bool {
	if offset1 >= offset2 && offset2+size2 > offset1 {
		return true
	}
	if offset2 >= offset1 && offset1+size1 > offset2 {
		return true
	}
	return false
}

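// zeroUpper32Bits reports whether the upper 32 bits of x are known to be
// zero. depth bounds the recursion through Phi and Select values.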
func zeroUpper32Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		return false
	}
	switch x.Op {
	case OpAMD64MOVLconst, OpAMD64MOVLload, OpAMD64MOVLQZX, OpAMD64MOVLloadidx1,
		OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVBload, OpAMD64MOVBloadidx1,
		OpAMD64MOVLloadidx4, OpAMD64ADDLload, OpAMD64SUBLload, OpAMD64ANDLload,
		OpAMD64ORLload, OpAMD64XORLload, OpAMD64CVTTSD2SL,
		OpAMD64ADDL, OpAMD64ADDLconst, OpAMD64SUBL, OpAMD64SUBLconst,
		OpAMD64ANDL, OpAMD64ANDLconst, OpAMD64ORL, OpAMD64ORLconst,
		OpAMD64XORL, OpAMD64XORLconst, OpAMD64NEGL, OpAMD64NOTL,
		OpAMD64SHRL, OpAMD64SHRLconst, OpAMD64SARL, OpAMD64SARLconst,
		OpAMD64SHLL, OpAMD64SHLLconst:
		return true
	case OpARM64REV16W, OpARM64REVW, OpARM64RBITW, OpARM64CLZW, OpARM64EXTRWconst,
		OpARM64MULW, OpARM64MNEGW, OpARM64UDIVW, OpARM64DIVW, OpARM64UMODW,
		OpARM64MADDW, OpARM64MSUBW, OpARM64RORW, OpARM64RORWconst:
		return true
	case OpArg:
		return x.Type.Size() == 4 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper32Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true
	}
	return false
}

func zeroUpper48Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		return false
	}
	switch x.Op {
	case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2:
		return true
	case OpArg:
		return x.Type.Size() == 2 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper48Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true
	}
	return false
}

func zeroUpper56Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		return false
	}
	switch x.Op {
	case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1:
		return true
	case OpArg:
		return x.Type.Size() == 1 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper56Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true
	}
	return false
}

func isInlinableMemclr(c *Config, sz int64) bool {
	if sz < 0 {
		return false
	}

	switch c.arch {
	case "amd64", "arm64":
		return true
	case "ppc64le", "ppc64", "loong64":
		return sz < 512
	}
	return false
}

func isInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	switch c.arch {
	case "amd64":
		return sz <= 16 || (sz < 1024 && disjoint(dst, sz, src, sz))
	case "arm64":
		return sz <= 64 || (sz <= 1024 && disjoint(dst, sz, src, sz))
	case "386":
		return sz <= 8
	case "s390x", "ppc64", "ppc64le":
		return sz <= 8 || disjoint(dst, sz, src, sz)
	case "arm", "loong64", "mips", "mips64", "mipsle", "mips64le":
		return sz <= 4
	}
	return false
}
func IsInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	return isInlinableMemmove(dst, src, sz, c)
}

func logLargeCopy(v *Value, s int64) bool {
	if s < 128 {
		return true
	}
	if logopt.Enabled() {
		logopt.LogOpt(v.Pos, "copy", "lower", v.Block.Func.Name, fmt.Sprintf("%d bytes", s))
	}
	return true
}
func LogLargeCopy(funcName string, pos src.XPos, s int64) {
	if s < 128 {
		return
	}
	if logopt.Enabled() {
		logopt.LogOpt(pos, "copy", "lower", funcName, fmt.Sprintf("%d bytes", s))
	}
}

func hasSmallRotate(c *Config) bool {
	switch c.arch {
	case "amd64", "386":
		return true
	default:
		return false
	}
}

func supportsPPC64PCRel() bool {
	return buildcfg.GOPPC64 >= 10 && buildcfg.GOOS == "linux"
}

func newPPC64ShiftAuxInt(sh, mb, me, sz int64) int32 {
	if sh < 0 || sh >= sz {
		panic("PPC64 shift arg sh out of range")
	}
	if mb < 0 || mb >= sz {
		panic("PPC64 shift arg mb out of range")
	}
	if me < 0 || me >= sz {
		panic("PPC64 shift arg me out of range")
	}
	return int32(sh<<16 | mb<<8 | me)
}

func GetPPC64Shiftsh(auxint int64) int64 {
	return int64(int8(auxint >> 16))
}

func GetPPC64Shiftmb(auxint int64) int64 {
	return int64(int8(auxint >> 8))
}

func isPPC64WordRotateMask(v64 int64) bool {
	v := uint32(v64)
	vp := (v & -v) + v
	vn := ^v
	vpn := (vn & -vn) + vn
	return (v&vp == 0 || vn&vpn == 0) && v != 0
}

func isPPC64WordRotateMaskNonWrapping(v64 int64) bool {
	v := uint32(v64)
	vp := (v & -v) + v
	return (v&vp == 0) && v != 0 && uint64(uint32(v64)) == uint64(v64)
}

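// encodePPC64RotateMask packs a rotate amount and a contiguous bit mask
// into an AuxInt of the form me | mb<<8 | rotate<<16 | nbits<<24,
// matching the operands of the PPC64 rotate-and-mask instructions.
// It panics if the mask is all zeros or all ones, or if rotate >= nbits.
// For example, rotate=4, mask=0x000000F0, nbits=32 encodes to
// 32<<24 | 4<<16 | 24<<8 | 28 (mb=24, me=28).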
func encodePPC64RotateMask(rotate, mask, nbits int64) int64 {
	var mb, me, mbn, men int

	if mask == 0 || ^mask == 0 || rotate >= nbits {
		panic(fmt.Sprintf("invalid PPC64 rotate mask: %x %d %d", uint64(mask), rotate, nbits))
	} else if nbits == 32 {
		mb = bits.LeadingZeros32(uint32(mask))
		me = 32 - bits.TrailingZeros32(uint32(mask))
		mbn = bits.LeadingZeros32(^uint32(mask))
		men = 32 - bits.TrailingZeros32(^uint32(mask))
	} else {
		mb = bits.LeadingZeros64(uint64(mask))
		me = 64 - bits.TrailingZeros64(uint64(mask))
		mbn = bits.LeadingZeros64(^uint64(mask))
		men = 64 - bits.TrailingZeros64(^uint64(mask))
	}

	if mb == 0 && me == int(nbits) {
		mb, me = men, mbn
	}

	return int64(me) | int64(mb<<8) | rotate<<16 | nbits<<24
}

func mergePPC64RLDICLandSRDconst(encoded, s int64) int64 {
	mb := s
	r := 64 - s

	if (encoded>>8)&0xFF < mb {
		encoded = (encoded &^ 0xFF00) | mb<<8
	}

	if (encoded & 0xFF0000) != 0 {
		panic("non-zero rotate")
	}
	return encoded | r<<16
}

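// DecodePPC64RotateMask is the inverse operation of encodePPC64RotateMask.
// The values returned as mb and me satisfy the POWER ISA definition of
// MASK(x,y) where MASK(mb,me) = mask.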
func DecodePPC64RotateMask(sauxint int64) (rotate, mb, me int64, mask uint64) {
	auxint := uint64(sauxint)
	rotate = int64((auxint >> 16) & 0xFF)
	mb = int64((auxint >> 8) & 0xFF)
	me = int64((auxint >> 0) & 0xFF)
	nbits := int64((auxint >> 24) & 0xFF)
	mask = ((1 << uint(nbits-mb)) - 1) ^ ((1 << uint(nbits-me)) - 1)
	if mb > me {
		mask = ^mask
	}
	if nbits == 32 {
		mask = uint64(uint32(mask))
	}

	me = (me - 1) & (nbits - 1)
	return
}

func isPPC64ValidShiftMask(v int64) bool {
	if (v != 0) && ((v+1)&v) == 0 {
		return true
	}
	return false
}

func getPPC64ShiftMaskLength(v int64) int64 {
	return int64(bits.Len64(uint64(v)))
}

func mergePPC64RShiftMask(m, s, nbits int64) int64 {
	smask := uint64((1<<uint(nbits))-1) >> uint(s)
	return m & int64(smask)
}

func mergePPC64AndSrwi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 32)
	if !isPPC64WordRotateMask(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}

func mergePPC64AndSrdi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 64)

	rv := bits.RotateLeft64(0xFFFFFFFF00000000, -int(s))
	if rv&uint64(mask) != 0 {
		return 0
	}
	if !isPPC64WordRotateMaskNonWrapping(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}

func mergePPC64AndSldi(m, s int64) int64 {
	mask := -1 << s & m

	rv := bits.RotateLeft64(0xFFFFFFFF00000000, int(s))
	if rv&uint64(mask) != 0 {
		return 0
	}
	if !isPPC64WordRotateMaskNonWrapping(mask) {
		return 0
	}
	return encodePPC64RotateMask(s&31, mask, 32)
}

func mergePPC64ClrlsldiSrw(sld, srw int64) int64 {
	mask_1 := uint64(0xFFFFFFFF >> uint(srw))
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(sld))
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	r_1 := 32 - srw
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 31

	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}

func mergePPC64ClrlsldiSrd(sld, srd int64) int64 {
	mask_1 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(srd)
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(sld))
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	r_1 := 64 - srd
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 63

	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}

	v1 := bits.RotateLeft64(0xFFFFFFFF00000000, int(r_3))
	if v1&mask_3 != 0 {
		return 0
	}
	return encodePPC64RotateMask(r_3&31, int64(mask_3), 32)
}

func mergePPC64ClrlsldiRlwinm(sld int32, rlw int64) int64 {
	r_1, _, _, mask_1 := DecodePPC64RotateMask(rlw)
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(int64(sld)))
	r_2 := GetPPC64Shiftsh(int64(sld))
	r_3 := (r_1 + r_2) & 31

	if !isPPC64WordRotateMask(int64(mask_3)) || uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}

func mergePPC64AndRlwinm(mask uint32, rlw int64) int64 {
	r, _, _, mask_rlw := DecodePPC64RotateMask(rlw)
	mask_out := (mask_rlw & uint64(mask))

	if !isPPC64WordRotateMask(int64(mask_out)) {
		return 0
	}
	return encodePPC64RotateMask(r, int64(mask_out), 32)
}

func mergePPC64MovwzregRlwinm(rlw int64) int64 {
	_, mb, me, _ := DecodePPC64RotateMask(rlw)
	if mb > me {
		return 0
	}
	return rlw
}

func mergePPC64RlwinmAnd(rlw int64, mask uint32) int64 {
	r, _, _, mask_rlw := DecodePPC64RotateMask(rlw)

	r_mask := bits.RotateLeft32(mask, int(r))

	mask_out := (mask_rlw & uint64(r_mask))

	if !isPPC64WordRotateMask(int64(mask_out)) {
		return 0
	}
	return encodePPC64RotateMask(r, int64(mask_out), 32)
}

func mergePPC64SldiRlwinm(sldi, rlw int64) int64 {
	r_1, mb, me, mask_1 := DecodePPC64RotateMask(rlw)
	if mb > me || mb < sldi {
		return 0
	}

	mask_3 := mask_1 << sldi
	r_3 := (r_1 + sldi) & 31

	if uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}

func mergePPC64SldiSrw(sld, srw int64) int64 {
	if sld > srw || srw >= 32 {
		return 0
	}
	mask_r := uint32(0xFFFFFFFF) >> uint(srw)
	mask_l := uint32(0xFFFFFFFF) >> uint(sld)
	mask := (mask_r & mask_l) << uint(sld)
	return encodePPC64RotateMask((32-srw+sld)&31, int64(mask), 32)
}

func convertPPC64OpToOpCC(op *Value) *Value {
	ccOpMap := map[Op]Op{
		OpPPC64ADD:      OpPPC64ADDCC,
		OpPPC64ADDconst: OpPPC64ADDCCconst,
		OpPPC64AND:      OpPPC64ANDCC,
		OpPPC64ANDN:     OpPPC64ANDNCC,
		OpPPC64ANDconst: OpPPC64ANDCCconst,
		OpPPC64CNTLZD:   OpPPC64CNTLZDCC,
		OpPPC64MULHDU:   OpPPC64MULHDUCC,
		OpPPC64NEG:      OpPPC64NEGCC,
		OpPPC64NOR:      OpPPC64NORCC,
		OpPPC64OR:       OpPPC64ORCC,
		OpPPC64RLDICL:   OpPPC64RLDICLCC,
		OpPPC64SUB:      OpPPC64SUBCC,
		OpPPC64XOR:      OpPPC64XORCC,
	}
	b := op.Block
	opCC := b.NewValue0I(op.Pos, ccOpMap[op.Op], types.NewTuple(op.Type, types.TypeFlags), op.AuxInt)
	opCC.AddArgs(op.Args...)
	op.reset(OpSelect0)
	op.AddArgs(opCC)
	return op
}

func convertPPC64RldiclAndccconst(sauxint int64) int64 {
	r, _, _, mask := DecodePPC64RotateMask(sauxint)
	if r != 0 || mask&0xFFFF != mask {
		return 0
	}
	return int64(mask)
}

func rotateLeft32(v, rotate int64) int64 {
	return int64(bits.RotateLeft32(uint32(v), int(rotate)))
}

func rotateRight64(v, rotate int64) int64 {
	return int64(bits.RotateLeft64(uint64(v), int(-rotate)))
}

func armBFAuxInt(lsb, width int64) arm64BitField {
	if lsb < 0 || lsb > 63 {
		panic("ARM(64) bit field lsb constant out of range")
	}
	if width < 1 || lsb+width > 64 {
		panic("ARM(64) bit field width constant out of range")
	}
	return arm64BitField(width | lsb<<8)
}

func (bfc arm64BitField) lsb() int64 {
	return int64(uint64(bfc) >> 8)
}

func (bfc arm64BitField) width() int64 {
	return int64(bfc) & 0xff
}

func isARM64BFMask(lsb, mask, rshift int64) bool {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	return shiftedMask != 0 && isPowerOfTwo(shiftedMask+1) && nto(shiftedMask)+lsb < 64
}

func arm64BFWidth(mask, rshift int64) int64 {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	if shiftedMask == 0 {
		panic("ARM64 BF mask is zero")
	}
	return nto(shiftedMask)
}

func arm64ConditionalParamsAuxInt(cond Op, nzcv uint8) arm64ConditionalParams {
	if cond < OpARM64Equal || cond > OpARM64GreaterEqualU {
		panic("Wrong conditional operation")
	}
	if nzcv&0x0f != nzcv {
		panic("Wrong value of NZCV flag")
	}
	return arm64ConditionalParams{cond, nzcv, 0, false}
}

func arm64ConditionalParamsAuxIntWithValue(cond Op, nzcv uint8, value uint8) arm64ConditionalParams {
	if value&0x1f != value {
		panic("Wrong value of constant")
	}
	params := arm64ConditionalParamsAuxInt(cond, nzcv)
	params.constValue = value
	params.ind = true
	return params
}

func (condParams arm64ConditionalParams) Cond() Op {
	return condParams.cond
}

func (condParams arm64ConditionalParams) Nzcv() int64 {
	return int64(condParams.nzcv)
}

func (condParams arm64ConditionalParams) ConstValue() (int64, bool) {
	return int64(condParams.constValue), condParams.ind
}

func registerizable(b *Block, typ *types.Type) bool {
	if typ.IsPtrShaped() || typ.IsFloat() || typ.IsBoolean() {
		return true
	}
	if typ.IsInteger() {
		return typ.Size() <= b.Func.Config.RegSize
	}
	return false
}

func needRaceCleanup(sym *AuxCall, v *Value) bool {
	f := v.Block.Func
	if !f.Config.Race {
		return false
	}
	if !isSameCall(sym, "runtime.racefuncenter") && !isSameCall(sym, "runtime.racefuncexit") {
		return false
	}
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpStaticCall, OpStaticLECall:
				s := v.Aux.(*AuxCall).Fn.String()
				switch s {
				case "runtime.racefuncenter", "runtime.racefuncexit",
					"runtime.panicdivide", "runtime.panicwrap",
					"runtime.panicshift":
					continue
				}
				return false
			case OpPanicBounds, OpPanicExtend:
			case OpClosureCall, OpInterCall, OpClosureLECall, OpInterLECall:
				return false
			}
		}
	}
	if isSameCall(sym, "runtime.racefuncenter") {
		if v.Args[0].Op != OpStore {
			if v.Op == OpStaticLECall {
				return true
			}
			return false
		}
		mem := v.Args[0].Args[2]
		v.Args[0].reset(OpCopy)
		v.Args[0].AddArg(mem)
	}
	return true
}

func symIsRO(sym Sym) bool {
	lsym := sym.(*obj.LSym)
	return lsym.Type == objabi.SRODATA && len(lsym.R) == 0
}

func symIsROZero(sym Sym) bool {
	lsym := sym.(*obj.LSym)
	if lsym.Type != objabi.SRODATA || len(lsym.R) != 0 {
		return false
	}
	for _, b := range lsym.P {
		if b != 0 {
			return false
		}
	}
	return true
}

func isFixedLoad(v *Value, sym Sym, off int64) bool {
	lsym := sym.(*obj.LSym)
	if (v.Type.IsPtrShaped() || v.Type.IsUintptr()) && lsym.Type == objabi.SRODATA {
		for _, r := range lsym.R {
			if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
				return true
			}
		}
		return false
	}

	if ti := lsym.TypeInfo(); ti != nil {
		t := ti.Type.(*types.Type)

		for _, f := range rttype.Type.Fields() {
			if f.Offset == off && copyCompatibleType(v.Type, f.Type) {
				switch f.Sym.Name {
				case "Size_", "PtrBytes", "Hash", "Kind_", "GCData":
					return true
				default:
					return false
				}
			}
		}

		if t.IsPtr() && off == rttype.PtrType.OffsetOf("Elem") {
			return true
		}

		return false
	}

	return false
}

func rewriteFixedLoad(v *Value, sym Sym, sb *Value, off int64) *Value {
	b := v.Block
	f := b.Func

	lsym := sym.(*obj.LSym)
	if (v.Type.IsPtrShaped() || v.Type.IsUintptr()) && lsym.Type == objabi.SRODATA {
		for _, r := range lsym.R {
			if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
				if strings.HasPrefix(r.Sym.Name, "type:") {
					reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
				} else if strings.HasPrefix(r.Sym.Name, "go:itab") {
					reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
				}
				v.reset(OpAddr)
				v.Aux = symToAux(r.Sym)
				v.AddArg(sb)
				return v
			}
		}
		base.Fatalf("fixedLoad data not known for %s:%d", sym, off)
	}

	if ti := lsym.TypeInfo(); ti != nil {
		t := ti.Type.(*types.Type)

		ptrSizedOpConst := OpConst64
		if f.Config.PtrSize == 4 {
			ptrSizedOpConst = OpConst32
		}

		for _, f := range rttype.Type.Fields() {
			if f.Offset == off && copyCompatibleType(v.Type, f.Type) {
				switch f.Sym.Name {
				case "Size_":
					v.reset(ptrSizedOpConst)
					v.AuxInt = t.Size()
					return v
				case "PtrBytes":
					v.reset(ptrSizedOpConst)
					v.AuxInt = types.PtrDataSize(t)
					return v
				case "Hash":
					v.reset(OpConst32)
					v.AuxInt = int64(types.TypeHash(t))
					return v
				case "Kind_":
					v.reset(OpConst8)
					v.AuxInt = int64(reflectdata.ABIKindOfType(t))
					return v
				case "GCData":
					gcdata, _ := reflectdata.GCSym(t, true)
					v.reset(OpAddr)
					v.Aux = symToAux(gcdata)
					v.AddArg(sb)
					return v
				default:
					base.Fatalf("unknown field %s for fixedLoad of %s at offset %d", f.Sym.Name, lsym.Name, off)
				}
			}
		}

		if t.IsPtr() && off == rttype.PtrType.OffsetOf("Elem") {
			elemSym := reflectdata.TypeLinksym(t.Elem())
			reflectdata.MarkTypeSymUsedInInterface(elemSym, f.fe.Func().Linksym())
			v.reset(OpAddr)
			v.Aux = symToAux(elemSym)
			v.AddArg(sb)
			return v
		}

		base.Fatalf("fixedLoad data not known for %s:%d", sym, off)
	}

	base.Fatalf("fixedLoad data not known for %s:%d", sym, off)
	return nil
}

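// read8 reads one byte from the read-only global sym at offset off.
// Out-of-range offsets read as 0; they can occur in dead code, where any
// result is acceptable.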
func read8(sym Sym, off int64) uint8 {
	lsym := sym.(*obj.LSym)
	if off >= int64(len(lsym.P)) || off < 0 {
		return 0
	}
	return lsym.P[off]
}

func read16(sym Sym, off int64, byteorder binary.ByteOrder) uint16 {
	lsym := sym.(*obj.LSym)
	var src []byte
	if 0 <= off && off < int64(len(lsym.P)) {
		src = lsym.P[off:]
	}
	buf := make([]byte, 2)
	copy(buf, src)
	return byteorder.Uint16(buf)
}

func read32(sym Sym, off int64, byteorder binary.ByteOrder) uint32 {
	lsym := sym.(*obj.LSym)
	var src []byte
	if 0 <= off && off < int64(len(lsym.P)) {
		src = lsym.P[off:]
	}
	buf := make([]byte, 4)
	copy(buf, src)
	return byteorder.Uint32(buf)
}

func read64(sym Sym, off int64, byteorder binary.ByteOrder) uint64 {
	lsym := sym.(*obj.LSym)
	var src []byte
	if 0 <= off && off < int64(len(lsym.P)) {
		src = lsym.P[off:]
	}
	buf := make([]byte, 8)
	copy(buf, src)
	return byteorder.Uint64(buf)
}

func sequentialAddresses(x, y *Value, n int64) bool {
	if x == y && n == 0 {
		return true
	}
	if x.Op == Op386ADDL && y.Op == Op386LEAL1 && y.AuxInt == n && y.Aux == nil &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	if x.Op == Op386LEAL1 && y.Op == Op386LEAL1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	if x.Op == OpAMD64ADDQ && y.Op == OpAMD64LEAQ1 && y.AuxInt == n && y.Aux == nil &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	if x.Op == OpAMD64LEAQ1 && y.Op == OpAMD64LEAQ1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
		(x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
			x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
		return true
	}
	return false
}

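// flagConstant represents the result of a compile-time comparison as a
// set of condition flags: N (negative), Z (zero), C (carry), and
// V (overflow), packed into the low four bits.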
type flagConstant uint8

func (fc flagConstant) N() bool {
	return fc&1 != 0
}

func (fc flagConstant) Z() bool {
	return fc&2 != 0
}

func (fc flagConstant) C() bool {
	return fc&4 != 0
}

func (fc flagConstant) V() bool {
	return fc&8 != 0
}

func (fc flagConstant) eq() bool {
	return fc.Z()
}
func (fc flagConstant) ne() bool {
	return !fc.Z()
}
func (fc flagConstant) lt() bool {
	return fc.N() != fc.V()
}
func (fc flagConstant) le() bool {
	return fc.Z() || fc.lt()
}
func (fc flagConstant) gt() bool {
	return !fc.Z() && fc.ge()
}
func (fc flagConstant) ge() bool {
	return fc.N() == fc.V()
}
func (fc flagConstant) ult() bool {
	return !fc.C()
}
func (fc flagConstant) ule() bool {
	return fc.Z() || fc.ult()
}
func (fc flagConstant) ugt() bool {
	return !fc.Z() && fc.uge()
}
func (fc flagConstant) uge() bool {
	return fc.C()
}

func (fc flagConstant) ltNoov() bool {
	return fc.lt() && !fc.V()
}
func (fc flagConstant) leNoov() bool {
	return fc.le() && !fc.V()
}
func (fc flagConstant) gtNoov() bool {
	return fc.gt() && !fc.V()
}
func (fc flagConstant) geNoov() bool {
	return fc.ge() && !fc.V()
}

func (fc flagConstant) String() string {
	return fmt.Sprintf("N=%v,Z=%v,C=%v,V=%v", fc.N(), fc.Z(), fc.C(), fc.V())
}

type flagConstantBuilder struct {
	N bool
	Z bool
	C bool
	V bool
}

func (fcs flagConstantBuilder) encode() flagConstant {
	var fc flagConstant
	if fcs.N {
		fc |= 1
	}
	if fcs.Z {
		fc |= 2
	}
	if fcs.C {
		fc |= 4
	}
	if fcs.V {
		fc |= 8
	}
	return fc
}

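// addFlags64 returns the flags that would be set from computing x+y.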
func addFlags64(x, y int64) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x+y == 0
	fcb.N = x+y < 0
	fcb.C = uint64(x+y) < uint64(x)
	fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
	return fcb.encode()
}

func subFlags64(x, y int64) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x-y == 0
	fcb.N = x-y < 0
	fcb.C = uint64(y) <= uint64(x)
	fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
	return fcb.encode()
}

func addFlags32(x, y int32) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x+y == 0
	fcb.N = x+y < 0
	fcb.C = uint32(x+y) < uint32(x)
	fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
	return fcb.encode()
}

func subFlags32(x, y int32) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x-y == 0
	fcb.N = x-y < 0
	fcb.C = uint32(y) <= uint32(x)
	fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
	return fcb.encode()
}

func logicFlags64(x int64) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x == 0
	fcb.N = x < 0
	return fcb.encode()
}

func logicFlags32(x int32) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x == 0
	fcb.N = x < 0
	return fcb.encode()
}

func makeJumpTableSym(b *Block) *obj.LSym {
	s := base.Ctxt.Lookup(fmt.Sprintf("%s.jump%d", b.Func.fe.Func().LSym.Name, b.ID))
	s.Set(obj.AttrStatic, true)
	return s
}

func canRotate(c *Config, bits int64) bool {
	if bits > c.PtrSize*8 {
		return false
	}
	switch c.arch {
	case "386", "amd64", "arm64", "loong64", "riscv64":
		return true
	case "arm", "s390x", "ppc64", "ppc64le", "wasm":
		return bits >= 32
	default:
		return false
	}
}

func isARM64bitcon(x uint64) bool {
	if x == 1<<64-1 || x == 0 {
		return false
	}

	switch {
	case x != x>>32|x<<32:
	case x != x>>16|x<<48:
		x = uint64(int64(int32(x)))
	case x != x>>8|x<<56:
		x = uint64(int64(int16(x)))
	case x != x>>4|x<<60:
		x = uint64(int64(int8(x)))
	default:
		return true
	}
	return sequenceOfOnes(x) || sequenceOfOnes(^x)
}

func sequenceOfOnes(x uint64) bool {
	y := x & -x
	y += x
	return (y-1)&y == 0
}

func isARM64addcon(v int64) bool {
	if v < 0 {
		return false
	}
	if (v & 0xFFF) == 0 {
		v >>= 12
	}
	return v <= 0xFFF
}

func setPos(v *Value, pos src.XPos) bool {
	v.Pos = pos
	return true
}

func isNonNegative(v *Value) bool {
	if !v.Type.IsInteger() {
		v.Fatalf("isNonNegative bad type: %v", v.Type)
	}

	switch v.Op {
	case OpConst64:
		return v.AuxInt >= 0

	case OpConst32:
		return int32(v.AuxInt) >= 0

	case OpConst16:
		return int16(v.AuxInt) >= 0

	case OpConst8:
		return int8(v.AuxInt) >= 0

	case OpStringLen, OpSliceLen, OpSliceCap,
		OpZeroExt8to64, OpZeroExt16to64, OpZeroExt32to64,
		OpZeroExt8to32, OpZeroExt16to32, OpZeroExt8to16,
		OpCtz64, OpCtz32, OpCtz16, OpCtz8,
		OpCtz64NonZero, OpCtz32NonZero, OpCtz16NonZero, OpCtz8NonZero,
		OpBitLen64, OpBitLen32, OpBitLen16, OpBitLen8:
		return true

	case OpRsh64Ux64, OpRsh32Ux64:
		by := v.Args[1]
		return by.Op == OpConst64 && by.AuxInt > 0

	case OpRsh64x64, OpRsh32x64, OpRsh8x64, OpRsh16x64, OpRsh32x32, OpRsh64x32,
		OpSignExt32to64, OpSignExt16to64, OpSignExt8to64, OpSignExt16to32, OpSignExt8to32:
		return isNonNegative(v.Args[0])

	case OpAnd64, OpAnd32, OpAnd16, OpAnd8:
		return isNonNegative(v.Args[0]) || isNonNegative(v.Args[1])

	case OpMod64, OpMod32, OpMod16, OpMod8,
		OpDiv64, OpDiv32, OpDiv16, OpDiv8,
		OpOr64, OpOr32, OpOr16, OpOr8,
		OpXor64, OpXor32, OpXor16, OpXor8:
		return isNonNegative(v.Args[0]) && isNonNegative(v.Args[1])
	}
	return false
}

func rewriteStructLoad(v *Value) *Value {
	b := v.Block
	ptr := v.Args[0]
	mem := v.Args[1]

	t := v.Type
	args := make([]*Value, t.NumFields())
	for i := range args {
		ft := t.FieldType(i)
		addr := b.NewValue1I(v.Pos, OpOffPtr, ft.PtrTo(), t.FieldOff(i), ptr)
		args[i] = b.NewValue2(v.Pos, OpLoad, ft, addr, mem)
	}

	v.reset(OpStructMake)
	v.AddArgs(args...)
	return v
}

func rewriteStructStore(v *Value) *Value {
	b := v.Block
	dst := v.Args[0]
	x := v.Args[1]
	if x.Op != OpStructMake {
		base.Fatalf("invalid struct store: %v", x)
	}
	mem := v.Args[2]

	t := x.Type
	for i, arg := range x.Args {
		ft := t.FieldType(i)

		addr := b.NewValue1I(v.Pos, OpOffPtr, ft.PtrTo(), t.FieldOff(i), dst)
		mem = b.NewValue3A(v.Pos, OpStore, types.TypeMem, typeToAux(ft), addr, arg, mem)
	}

	return mem
}

func isDirectType(v *Value) bool {
	return isDirectType1(v)
}

func isDirectType1(v *Value) bool {
	switch v.Op {
	case OpITab:
		return isDirectType2(v.Args[0])
	case OpAddr:
		lsym := v.Aux.(*obj.LSym)
		if ti := lsym.TypeInfo(); ti != nil {
			return types.IsDirectIface(ti.Type.(*types.Type))
		}
	}
	return false
}

func isDirectType2(v *Value) bool {
	switch v.Op {
	case OpIMake:
		return isDirectType1(v.Args[0])
	}
	return false
}

func isDirectIface(v *Value) bool {
	return isDirectIface1(v, 9)
}

func isDirectIface1(v *Value, depth int) bool {
	if depth == 0 {
		return false
	}
	switch v.Op {
	case OpITab:
		return isDirectIface2(v.Args[0], depth-1)
	case OpAddr:
		lsym := v.Aux.(*obj.LSym)
		if ii := lsym.ItabInfo(); ii != nil {
			return types.IsDirectIface(ii.Type.(*types.Type))
		}
	case OpConstNil:
		return true
	}
	return false
}

func isDirectIface2(v *Value, depth int) bool {
	if depth == 0 {
		return false
	}
	switch v.Op {
	case OpIMake:
		return isDirectIface1(v.Args[0], depth-1)
	case OpPhi:
		for _, a := range v.Args {
			if !isDirectIface2(a, depth-1) {
				return false
			}
		}
		return true
	}
	return false
}

func bitsAdd64(x, y, carry int64) (r struct{ sum, carry int64 }) {
	s, c := bits.Add64(uint64(x), uint64(y), uint64(carry))
	r.sum, r.carry = int64(s), int64(c)
	return
}

func bitsMulU64(x, y int64) (r struct{ hi, lo int64 }) {
	hi, lo := bits.Mul64(uint64(x), uint64(y))
	r.hi, r.lo = int64(hi), int64(lo)
	return
}
func bitsMulU32(x, y int32) (r struct{ hi, lo int32 }) {
	hi, lo := bits.Mul32(uint32(x), uint32(y))
	r.hi, r.lo = int32(hi), int32(lo)
	return
}

func flagify(v *Value) bool {
	var flagVersion Op
	switch v.Op {
	case OpAMD64ADDQconst:
		flagVersion = OpAMD64ADDQconstflags
	case OpAMD64ADDLconst:
		flagVersion = OpAMD64ADDLconstflags
	default:
		base.Fatalf("can't flagify op %s", v.Op)
	}
	inner := v.copyInto(v.Block)
	inner.Op = flagVersion
	inner.Type = types.NewTuple(v.Type, types.TypeFlags)
	v.reset(OpSelect0)
	v.AddArg(inner)
	return true
}

type PanicBoundsC struct {
	C int64
}

type PanicBoundsCC struct {
	Cx int64
	Cy int64
}

func (p PanicBoundsC) CanBeAnSSAAux() {
}
func (p PanicBoundsCC) CanBeAnSSAAux() {
}

func auxToPanicBoundsC(i Aux) PanicBoundsC {
	return i.(PanicBoundsC)
}
func auxToPanicBoundsCC(i Aux) PanicBoundsCC {
	return i.(PanicBoundsCC)
}
func panicBoundsCToAux(p PanicBoundsC) Aux {
	return p
}
func panicBoundsCCToAux(p PanicBoundsCC) Aux {
	return p
}

func isDictArgSym(sym Sym) bool {
	return sym.(*ir.Name).Sym().Name == typecheck.LocalDictName
}