Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "internal/buildcfg"
     6  import "math"
     7  import "cmd/compile/internal/types"
     8  
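// rewriteValueRISCV64 dispatches on the generic SSA op. Ops that lower 1:1 to a
// RISCV64 machine op are handled inline by swapping v.Op and returning true, keeping
// the existing arguments. Ops whose lowering needs rewritten arguments, aux values or
// extra conditions are forwarded to a per-op helper below. A false return means no
// rule applied and v is left unchanged.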
     9  func rewriteValueRISCV64(v *Value) bool {
    10  	switch v.Op {
    11  	case OpAbs:
    12  		v.Op = OpRISCV64FABSD
    13  		return true
    14  	case OpAdd16:
    15  		v.Op = OpRISCV64ADD
    16  		return true
    17  	case OpAdd32:
    18  		v.Op = OpRISCV64ADD
    19  		return true
    20  	case OpAdd32F:
    21  		v.Op = OpRISCV64FADDS
    22  		return true
    23  	case OpAdd64:
    24  		v.Op = OpRISCV64ADD
    25  		return true
    26  	case OpAdd64F:
    27  		v.Op = OpRISCV64FADDD
    28  		return true
    29  	case OpAdd8:
    30  		v.Op = OpRISCV64ADD
    31  		return true
    32  	case OpAddPtr:
    33  		v.Op = OpRISCV64ADD
    34  		return true
    35  	case OpAddr:
    36  		return rewriteValueRISCV64_OpAddr(v)
    37  	case OpAnd16:
    38  		v.Op = OpRISCV64AND
    39  		return true
    40  	case OpAnd32:
    41  		v.Op = OpRISCV64AND
    42  		return true
    43  	case OpAnd64:
    44  		v.Op = OpRISCV64AND
    45  		return true
    46  	case OpAnd8:
    47  		v.Op = OpRISCV64AND
    48  		return true
    49  	case OpAndB:
    50  		v.Op = OpRISCV64AND
    51  		return true
    52  	case OpAtomicAdd32:
    53  		v.Op = OpRISCV64LoweredAtomicAdd32
    54  		return true
    55  	case OpAtomicAdd64:
    56  		v.Op = OpRISCV64LoweredAtomicAdd64
    57  		return true
    58  	case OpAtomicAnd32:
    59  		v.Op = OpRISCV64LoweredAtomicAnd32
    60  		return true
    61  	case OpAtomicAnd8:
    62  		return rewriteValueRISCV64_OpAtomicAnd8(v)
    63  	case OpAtomicCompareAndSwap32:
    64  		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
    65  	case OpAtomicCompareAndSwap64:
    66  		v.Op = OpRISCV64LoweredAtomicCas64
    67  		return true
    68  	case OpAtomicExchange32:
    69  		v.Op = OpRISCV64LoweredAtomicExchange32
    70  		return true
    71  	case OpAtomicExchange64:
    72  		v.Op = OpRISCV64LoweredAtomicExchange64
    73  		return true
    74  	case OpAtomicLoad32:
    75  		v.Op = OpRISCV64LoweredAtomicLoad32
    76  		return true
    77  	case OpAtomicLoad64:
    78  		v.Op = OpRISCV64LoweredAtomicLoad64
    79  		return true
    80  	case OpAtomicLoad8:
    81  		v.Op = OpRISCV64LoweredAtomicLoad8
    82  		return true
    83  	case OpAtomicLoadPtr:
    84  		v.Op = OpRISCV64LoweredAtomicLoad64
    85  		return true
    86  	case OpAtomicOr32:
    87  		v.Op = OpRISCV64LoweredAtomicOr32
    88  		return true
    89  	case OpAtomicOr8:
    90  		return rewriteValueRISCV64_OpAtomicOr8(v)
    91  	case OpAtomicStore32:
    92  		v.Op = OpRISCV64LoweredAtomicStore32
    93  		return true
    94  	case OpAtomicStore64:
    95  		v.Op = OpRISCV64LoweredAtomicStore64
    96  		return true
    97  	case OpAtomicStore8:
    98  		v.Op = OpRISCV64LoweredAtomicStore8
    99  		return true
   100  	case OpAtomicStorePtrNoWB:
   101  		v.Op = OpRISCV64LoweredAtomicStore64
   102  		return true
   103  	case OpAvg64u:
   104  		return rewriteValueRISCV64_OpAvg64u(v)
   105  	case OpBitLen16:
   106  		return rewriteValueRISCV64_OpBitLen16(v)
   107  	case OpBitLen32:
   108  		return rewriteValueRISCV64_OpBitLen32(v)
   109  	case OpBitLen64:
   110  		return rewriteValueRISCV64_OpBitLen64(v)
   111  	case OpBitLen8:
   112  		return rewriteValueRISCV64_OpBitLen8(v)
   113  	case OpBswap16:
   114  		return rewriteValueRISCV64_OpBswap16(v)
   115  	case OpBswap32:
   116  		return rewriteValueRISCV64_OpBswap32(v)
   117  	case OpBswap64:
   118  		v.Op = OpRISCV64REV8
   119  		return true
   120  	case OpClosureCall:
   121  		v.Op = OpRISCV64CALLclosure
   122  		return true
   123  	case OpCom16:
   124  		v.Op = OpRISCV64NOT
   125  		return true
   126  	case OpCom32:
   127  		v.Op = OpRISCV64NOT
   128  		return true
   129  	case OpCom64:
   130  		v.Op = OpRISCV64NOT
   131  		return true
   132  	case OpCom8:
   133  		v.Op = OpRISCV64NOT
   134  		return true
   135  	case OpConst16:
   136  		return rewriteValueRISCV64_OpConst16(v)
   137  	case OpConst32:
   138  		return rewriteValueRISCV64_OpConst32(v)
   139  	case OpConst32F:
   140  		return rewriteValueRISCV64_OpConst32F(v)
   141  	case OpConst64:
   142  		return rewriteValueRISCV64_OpConst64(v)
   143  	case OpConst64F:
   144  		return rewriteValueRISCV64_OpConst64F(v)
   145  	case OpConst8:
   146  		return rewriteValueRISCV64_OpConst8(v)
   147  	case OpConstBool:
   148  		return rewriteValueRISCV64_OpConstBool(v)
   149  	case OpConstNil:
   150  		return rewriteValueRISCV64_OpConstNil(v)
   151  	case OpCopysign:
   152  		v.Op = OpRISCV64FSGNJD
   153  		return true
   154  	case OpCtz16:
   155  		return rewriteValueRISCV64_OpCtz16(v)
   156  	case OpCtz16NonZero:
   157  		v.Op = OpCtz64
   158  		return true
   159  	case OpCtz32:
   160  		v.Op = OpRISCV64CTZW
   161  		return true
   162  	case OpCtz32NonZero:
   163  		v.Op = OpCtz64
   164  		return true
   165  	case OpCtz64:
   166  		v.Op = OpRISCV64CTZ
   167  		return true
   168  	case OpCtz64NonZero:
   169  		v.Op = OpCtz64
   170  		return true
   171  	case OpCtz8:
   172  		return rewriteValueRISCV64_OpCtz8(v)
   173  	case OpCtz8NonZero:
   174  		v.Op = OpCtz64
   175  		return true
   176  	case OpCvt32Fto32:
   177  		v.Op = OpRISCV64FCVTWS
   178  		return true
   179  	case OpCvt32Fto64:
   180  		v.Op = OpRISCV64FCVTLS
   181  		return true
   182  	case OpCvt32Fto64F:
   183  		v.Op = OpRISCV64FCVTDS
   184  		return true
   185  	case OpCvt32to32F:
   186  		v.Op = OpRISCV64FCVTSW
   187  		return true
   188  	case OpCvt32to64F:
   189  		v.Op = OpRISCV64FCVTDW
   190  		return true
   191  	case OpCvt64Fto32:
   192  		v.Op = OpRISCV64FCVTWD
   193  		return true
   194  	case OpCvt64Fto32F:
   195  		v.Op = OpRISCV64FCVTSD
   196  		return true
   197  	case OpCvt64Fto64:
   198  		v.Op = OpRISCV64FCVTLD
   199  		return true
   200  	case OpCvt64to32F:
   201  		v.Op = OpRISCV64FCVTSL
   202  		return true
   203  	case OpCvt64to64F:
   204  		v.Op = OpRISCV64FCVTDL
   205  		return true
   206  	case OpCvtBoolToUint8:
   207  		v.Op = OpCopy
   208  		return true
   209  	case OpDiv16:
   210  		return rewriteValueRISCV64_OpDiv16(v)
   211  	case OpDiv16u:
   212  		return rewriteValueRISCV64_OpDiv16u(v)
   213  	case OpDiv32:
   214  		return rewriteValueRISCV64_OpDiv32(v)
   215  	case OpDiv32F:
   216  		v.Op = OpRISCV64FDIVS
   217  		return true
   218  	case OpDiv32u:
   219  		v.Op = OpRISCV64DIVUW
   220  		return true
   221  	case OpDiv64:
   222  		return rewriteValueRISCV64_OpDiv64(v)
   223  	case OpDiv64F:
   224  		v.Op = OpRISCV64FDIVD
   225  		return true
   226  	case OpDiv64u:
   227  		v.Op = OpRISCV64DIVU
   228  		return true
   229  	case OpDiv8:
   230  		return rewriteValueRISCV64_OpDiv8(v)
   231  	case OpDiv8u:
   232  		return rewriteValueRISCV64_OpDiv8u(v)
   233  	case OpEq16:
   234  		return rewriteValueRISCV64_OpEq16(v)
   235  	case OpEq32:
   236  		return rewriteValueRISCV64_OpEq32(v)
   237  	case OpEq32F:
   238  		v.Op = OpRISCV64FEQS
   239  		return true
   240  	case OpEq64:
   241  		return rewriteValueRISCV64_OpEq64(v)
   242  	case OpEq64F:
   243  		v.Op = OpRISCV64FEQD
   244  		return true
   245  	case OpEq8:
   246  		return rewriteValueRISCV64_OpEq8(v)
   247  	case OpEqB:
   248  		return rewriteValueRISCV64_OpEqB(v)
   249  	case OpEqPtr:
   250  		return rewriteValueRISCV64_OpEqPtr(v)
   251  	case OpFMA:
   252  		v.Op = OpRISCV64FMADDD
   253  		return true
   254  	case OpGetCallerPC:
   255  		v.Op = OpRISCV64LoweredGetCallerPC
   256  		return true
   257  	case OpGetCallerSP:
   258  		v.Op = OpRISCV64LoweredGetCallerSP
   259  		return true
   260  	case OpGetClosurePtr:
   261  		v.Op = OpRISCV64LoweredGetClosurePtr
   262  		return true
   263  	case OpHmul32:
   264  		return rewriteValueRISCV64_OpHmul32(v)
   265  	case OpHmul32u:
   266  		return rewriteValueRISCV64_OpHmul32u(v)
   267  	case OpHmul64:
   268  		v.Op = OpRISCV64MULH
   269  		return true
   270  	case OpHmul64u:
   271  		v.Op = OpRISCV64MULHU
   272  		return true
   273  	case OpInterCall:
   274  		v.Op = OpRISCV64CALLinter
   275  		return true
   276  	case OpIsInBounds:
   277  		v.Op = OpLess64U
   278  		return true
   279  	case OpIsNonNil:
   280  		v.Op = OpRISCV64SNEZ
   281  		return true
   282  	case OpIsSliceInBounds:
   283  		v.Op = OpLeq64U
   284  		return true
   285  	case OpLeq16:
   286  		return rewriteValueRISCV64_OpLeq16(v)
   287  	case OpLeq16U:
   288  		return rewriteValueRISCV64_OpLeq16U(v)
   289  	case OpLeq32:
   290  		return rewriteValueRISCV64_OpLeq32(v)
   291  	case OpLeq32F:
   292  		v.Op = OpRISCV64FLES
   293  		return true
   294  	case OpLeq32U:
   295  		return rewriteValueRISCV64_OpLeq32U(v)
   296  	case OpLeq64:
   297  		return rewriteValueRISCV64_OpLeq64(v)
   298  	case OpLeq64F:
   299  		v.Op = OpRISCV64FLED
   300  		return true
   301  	case OpLeq64U:
   302  		return rewriteValueRISCV64_OpLeq64U(v)
   303  	case OpLeq8:
   304  		return rewriteValueRISCV64_OpLeq8(v)
   305  	case OpLeq8U:
   306  		return rewriteValueRISCV64_OpLeq8U(v)
   307  	case OpLess16:
   308  		return rewriteValueRISCV64_OpLess16(v)
   309  	case OpLess16U:
   310  		return rewriteValueRISCV64_OpLess16U(v)
   311  	case OpLess32:
   312  		return rewriteValueRISCV64_OpLess32(v)
   313  	case OpLess32F:
   314  		v.Op = OpRISCV64FLTS
   315  		return true
   316  	case OpLess32U:
   317  		return rewriteValueRISCV64_OpLess32U(v)
   318  	case OpLess64:
   319  		v.Op = OpRISCV64SLT
   320  		return true
   321  	case OpLess64F:
   322  		v.Op = OpRISCV64FLTD
   323  		return true
   324  	case OpLess64U:
   325  		v.Op = OpRISCV64SLTU
   326  		return true
   327  	case OpLess8:
   328  		return rewriteValueRISCV64_OpLess8(v)
   329  	case OpLess8U:
   330  		return rewriteValueRISCV64_OpLess8U(v)
   331  	case OpLoad:
   332  		return rewriteValueRISCV64_OpLoad(v)
   333  	case OpLocalAddr:
   334  		return rewriteValueRISCV64_OpLocalAddr(v)
   335  	case OpLsh16x16:
   336  		return rewriteValueRISCV64_OpLsh16x16(v)
   337  	case OpLsh16x32:
   338  		return rewriteValueRISCV64_OpLsh16x32(v)
   339  	case OpLsh16x64:
   340  		return rewriteValueRISCV64_OpLsh16x64(v)
   341  	case OpLsh16x8:
   342  		return rewriteValueRISCV64_OpLsh16x8(v)
   343  	case OpLsh32x16:
   344  		return rewriteValueRISCV64_OpLsh32x16(v)
   345  	case OpLsh32x32:
   346  		return rewriteValueRISCV64_OpLsh32x32(v)
   347  	case OpLsh32x64:
   348  		return rewriteValueRISCV64_OpLsh32x64(v)
   349  	case OpLsh32x8:
   350  		return rewriteValueRISCV64_OpLsh32x8(v)
   351  	case OpLsh64x16:
   352  		return rewriteValueRISCV64_OpLsh64x16(v)
   353  	case OpLsh64x32:
   354  		return rewriteValueRISCV64_OpLsh64x32(v)
   355  	case OpLsh64x64:
   356  		return rewriteValueRISCV64_OpLsh64x64(v)
   357  	case OpLsh64x8:
   358  		return rewriteValueRISCV64_OpLsh64x8(v)
   359  	case OpLsh8x16:
   360  		return rewriteValueRISCV64_OpLsh8x16(v)
   361  	case OpLsh8x32:
   362  		return rewriteValueRISCV64_OpLsh8x32(v)
   363  	case OpLsh8x64:
   364  		return rewriteValueRISCV64_OpLsh8x64(v)
   365  	case OpLsh8x8:
   366  		return rewriteValueRISCV64_OpLsh8x8(v)
   367  	case OpMax32F:
   368  		v.Op = OpRISCV64LoweredFMAXS
   369  		return true
   370  	case OpMax64:
   371  		return rewriteValueRISCV64_OpMax64(v)
   372  	case OpMax64F:
   373  		v.Op = OpRISCV64LoweredFMAXD
   374  		return true
   375  	case OpMax64u:
   376  		return rewriteValueRISCV64_OpMax64u(v)
   377  	case OpMin32F:
   378  		v.Op = OpRISCV64LoweredFMINS
   379  		return true
   380  	case OpMin64:
   381  		return rewriteValueRISCV64_OpMin64(v)
   382  	case OpMin64F:
   383  		v.Op = OpRISCV64LoweredFMIND
   384  		return true
   385  	case OpMin64u:
   386  		return rewriteValueRISCV64_OpMin64u(v)
   387  	case OpMod16:
   388  		return rewriteValueRISCV64_OpMod16(v)
   389  	case OpMod16u:
   390  		return rewriteValueRISCV64_OpMod16u(v)
   391  	case OpMod32:
   392  		return rewriteValueRISCV64_OpMod32(v)
   393  	case OpMod32u:
   394  		v.Op = OpRISCV64REMUW
   395  		return true
   396  	case OpMod64:
   397  		return rewriteValueRISCV64_OpMod64(v)
   398  	case OpMod64u:
   399  		v.Op = OpRISCV64REMU
   400  		return true
   401  	case OpMod8:
   402  		return rewriteValueRISCV64_OpMod8(v)
   403  	case OpMod8u:
   404  		return rewriteValueRISCV64_OpMod8u(v)
   405  	case OpMove:
   406  		return rewriteValueRISCV64_OpMove(v)
   407  	case OpMul16:
   408  		return rewriteValueRISCV64_OpMul16(v)
   409  	case OpMul32:
   410  		v.Op = OpRISCV64MULW
   411  		return true
   412  	case OpMul32F:
   413  		v.Op = OpRISCV64FMULS
   414  		return true
   415  	case OpMul64:
   416  		v.Op = OpRISCV64MUL
   417  		return true
   418  	case OpMul64F:
   419  		v.Op = OpRISCV64FMULD
   420  		return true
   421  	case OpMul64uhilo:
   422  		v.Op = OpRISCV64LoweredMuluhilo
   423  		return true
   424  	case OpMul64uover:
   425  		v.Op = OpRISCV64LoweredMuluover
   426  		return true
   427  	case OpMul8:
   428  		return rewriteValueRISCV64_OpMul8(v)
   429  	case OpNeg16:
   430  		v.Op = OpRISCV64NEG
   431  		return true
   432  	case OpNeg32:
   433  		v.Op = OpRISCV64NEG
   434  		return true
   435  	case OpNeg32F:
   436  		v.Op = OpRISCV64FNEGS
   437  		return true
   438  	case OpNeg64:
   439  		v.Op = OpRISCV64NEG
   440  		return true
   441  	case OpNeg64F:
   442  		v.Op = OpRISCV64FNEGD
   443  		return true
   444  	case OpNeg8:
   445  		v.Op = OpRISCV64NEG
   446  		return true
   447  	case OpNeq16:
   448  		return rewriteValueRISCV64_OpNeq16(v)
   449  	case OpNeq32:
   450  		return rewriteValueRISCV64_OpNeq32(v)
   451  	case OpNeq32F:
   452  		v.Op = OpRISCV64FNES
   453  		return true
   454  	case OpNeq64:
   455  		return rewriteValueRISCV64_OpNeq64(v)
   456  	case OpNeq64F:
   457  		v.Op = OpRISCV64FNED
   458  		return true
   459  	case OpNeq8:
   460  		return rewriteValueRISCV64_OpNeq8(v)
   461  	case OpNeqB:
   462  		return rewriteValueRISCV64_OpNeqB(v)
   463  	case OpNeqPtr:
   464  		return rewriteValueRISCV64_OpNeqPtr(v)
   465  	case OpNilCheck:
   466  		v.Op = OpRISCV64LoweredNilCheck
   467  		return true
   468  	case OpNot:
   469  		v.Op = OpRISCV64SEQZ
   470  		return true
   471  	case OpOffPtr:
   472  		return rewriteValueRISCV64_OpOffPtr(v)
   473  	case OpOr16:
   474  		v.Op = OpRISCV64OR
   475  		return true
   476  	case OpOr32:
   477  		v.Op = OpRISCV64OR
   478  		return true
   479  	case OpOr64:
   480  		v.Op = OpRISCV64OR
   481  		return true
   482  	case OpOr8:
   483  		v.Op = OpRISCV64OR
   484  		return true
   485  	case OpOrB:
   486  		v.Op = OpRISCV64OR
   487  		return true
   488  	case OpPanicBounds:
   489  		return rewriteValueRISCV64_OpPanicBounds(v)
   490  	case OpPopCount16:
   491  		return rewriteValueRISCV64_OpPopCount16(v)
   492  	case OpPopCount32:
   493  		v.Op = OpRISCV64CPOPW
   494  		return true
   495  	case OpPopCount64:
   496  		v.Op = OpRISCV64CPOP
   497  		return true
   498  	case OpPopCount8:
   499  		return rewriteValueRISCV64_OpPopCount8(v)
   500  	case OpPubBarrier:
   501  		v.Op = OpRISCV64LoweredPubBarrier
   502  		return true
   503  	case OpRISCV64ADD:
   504  		return rewriteValueRISCV64_OpRISCV64ADD(v)
   505  	case OpRISCV64ADDI:
   506  		return rewriteValueRISCV64_OpRISCV64ADDI(v)
   507  	case OpRISCV64AND:
   508  		return rewriteValueRISCV64_OpRISCV64AND(v)
   509  	case OpRISCV64ANDI:
   510  		return rewriteValueRISCV64_OpRISCV64ANDI(v)
   511  	case OpRISCV64FADDD:
   512  		return rewriteValueRISCV64_OpRISCV64FADDD(v)
   513  	case OpRISCV64FADDS:
   514  		return rewriteValueRISCV64_OpRISCV64FADDS(v)
   515  	case OpRISCV64FMADDD:
   516  		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
   517  	case OpRISCV64FMADDS:
   518  		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
   519  	case OpRISCV64FMSUBD:
   520  		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
   521  	case OpRISCV64FMSUBS:
   522  		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
   523  	case OpRISCV64FNMADDD:
   524  		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
   525  	case OpRISCV64FNMADDS:
   526  		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
   527  	case OpRISCV64FNMSUBD:
   528  		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
   529  	case OpRISCV64FNMSUBS:
   530  		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
   531  	case OpRISCV64FSUBD:
   532  		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
   533  	case OpRISCV64FSUBS:
   534  		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
   535  	case OpRISCV64MOVBUload:
   536  		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
   537  	case OpRISCV64MOVBUreg:
   538  		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
   539  	case OpRISCV64MOVBload:
   540  		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
   541  	case OpRISCV64MOVBreg:
   542  		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
   543  	case OpRISCV64MOVBstore:
   544  		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
   545  	case OpRISCV64MOVBstorezero:
   546  		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
   547  	case OpRISCV64MOVDload:
   548  		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
   549  	case OpRISCV64MOVDnop:
   550  		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
   551  	case OpRISCV64MOVDreg:
   552  		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
   553  	case OpRISCV64MOVDstore:
   554  		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
   555  	case OpRISCV64MOVDstorezero:
   556  		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
   557  	case OpRISCV64MOVHUload:
   558  		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
   559  	case OpRISCV64MOVHUreg:
   560  		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
   561  	case OpRISCV64MOVHload:
   562  		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
   563  	case OpRISCV64MOVHreg:
   564  		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
   565  	case OpRISCV64MOVHstore:
   566  		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
   567  	case OpRISCV64MOVHstorezero:
   568  		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
   569  	case OpRISCV64MOVWUload:
   570  		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
   571  	case OpRISCV64MOVWUreg:
   572  		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
   573  	case OpRISCV64MOVWload:
   574  		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
   575  	case OpRISCV64MOVWreg:
   576  		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
   577  	case OpRISCV64MOVWstore:
   578  		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
   579  	case OpRISCV64MOVWstorezero:
   580  		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
   581  	case OpRISCV64NEG:
   582  		return rewriteValueRISCV64_OpRISCV64NEG(v)
   583  	case OpRISCV64NEGW:
   584  		return rewriteValueRISCV64_OpRISCV64NEGW(v)
   585  	case OpRISCV64OR:
   586  		return rewriteValueRISCV64_OpRISCV64OR(v)
   587  	case OpRISCV64ORI:
   588  		return rewriteValueRISCV64_OpRISCV64ORI(v)
   589  	case OpRISCV64ROL:
   590  		return rewriteValueRISCV64_OpRISCV64ROL(v)
   591  	case OpRISCV64ROLW:
   592  		return rewriteValueRISCV64_OpRISCV64ROLW(v)
   593  	case OpRISCV64ROR:
   594  		return rewriteValueRISCV64_OpRISCV64ROR(v)
   595  	case OpRISCV64RORW:
   596  		return rewriteValueRISCV64_OpRISCV64RORW(v)
   597  	case OpRISCV64SEQZ:
   598  		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
   599  	case OpRISCV64SLL:
   600  		return rewriteValueRISCV64_OpRISCV64SLL(v)
   601  	case OpRISCV64SLLI:
   602  		return rewriteValueRISCV64_OpRISCV64SLLI(v)
   603  	case OpRISCV64SLLW:
   604  		return rewriteValueRISCV64_OpRISCV64SLLW(v)
   605  	case OpRISCV64SLT:
   606  		return rewriteValueRISCV64_OpRISCV64SLT(v)
   607  	case OpRISCV64SLTI:
   608  		return rewriteValueRISCV64_OpRISCV64SLTI(v)
   609  	case OpRISCV64SLTIU:
   610  		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
   611  	case OpRISCV64SLTU:
   612  		return rewriteValueRISCV64_OpRISCV64SLTU(v)
   613  	case OpRISCV64SNEZ:
   614  		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
   615  	case OpRISCV64SRA:
   616  		return rewriteValueRISCV64_OpRISCV64SRA(v)
   617  	case OpRISCV64SRAI:
   618  		return rewriteValueRISCV64_OpRISCV64SRAI(v)
   619  	case OpRISCV64SRAW:
   620  		return rewriteValueRISCV64_OpRISCV64SRAW(v)
   621  	case OpRISCV64SRL:
   622  		return rewriteValueRISCV64_OpRISCV64SRL(v)
   623  	case OpRISCV64SRLI:
   624  		return rewriteValueRISCV64_OpRISCV64SRLI(v)
   625  	case OpRISCV64SRLW:
   626  		return rewriteValueRISCV64_OpRISCV64SRLW(v)
   627  	case OpRISCV64SUB:
   628  		return rewriteValueRISCV64_OpRISCV64SUB(v)
   629  	case OpRISCV64SUBW:
   630  		return rewriteValueRISCV64_OpRISCV64SUBW(v)
   631  	case OpRISCV64XOR:
   632  		return rewriteValueRISCV64_OpRISCV64XOR(v)
   633  	case OpRotateLeft16:
   634  		return rewriteValueRISCV64_OpRotateLeft16(v)
   635  	case OpRotateLeft32:
   636  		v.Op = OpRISCV64ROLW
   637  		return true
   638  	case OpRotateLeft64:
   639  		v.Op = OpRISCV64ROL
   640  		return true
   641  	case OpRotateLeft8:
   642  		return rewriteValueRISCV64_OpRotateLeft8(v)
   643  	case OpRound32F:
   644  		v.Op = OpRISCV64LoweredRound32F
   645  		return true
   646  	case OpRound64F:
   647  		v.Op = OpRISCV64LoweredRound64F
   648  		return true
   649  	case OpRsh16Ux16:
   650  		return rewriteValueRISCV64_OpRsh16Ux16(v)
   651  	case OpRsh16Ux32:
   652  		return rewriteValueRISCV64_OpRsh16Ux32(v)
   653  	case OpRsh16Ux64:
   654  		return rewriteValueRISCV64_OpRsh16Ux64(v)
   655  	case OpRsh16Ux8:
   656  		return rewriteValueRISCV64_OpRsh16Ux8(v)
   657  	case OpRsh16x16:
   658  		return rewriteValueRISCV64_OpRsh16x16(v)
   659  	case OpRsh16x32:
   660  		return rewriteValueRISCV64_OpRsh16x32(v)
   661  	case OpRsh16x64:
   662  		return rewriteValueRISCV64_OpRsh16x64(v)
   663  	case OpRsh16x8:
   664  		return rewriteValueRISCV64_OpRsh16x8(v)
   665  	case OpRsh32Ux16:
   666  		return rewriteValueRISCV64_OpRsh32Ux16(v)
   667  	case OpRsh32Ux32:
   668  		return rewriteValueRISCV64_OpRsh32Ux32(v)
   669  	case OpRsh32Ux64:
   670  		return rewriteValueRISCV64_OpRsh32Ux64(v)
   671  	case OpRsh32Ux8:
   672  		return rewriteValueRISCV64_OpRsh32Ux8(v)
   673  	case OpRsh32x16:
   674  		return rewriteValueRISCV64_OpRsh32x16(v)
   675  	case OpRsh32x32:
   676  		return rewriteValueRISCV64_OpRsh32x32(v)
   677  	case OpRsh32x64:
   678  		return rewriteValueRISCV64_OpRsh32x64(v)
   679  	case OpRsh32x8:
   680  		return rewriteValueRISCV64_OpRsh32x8(v)
   681  	case OpRsh64Ux16:
   682  		return rewriteValueRISCV64_OpRsh64Ux16(v)
   683  	case OpRsh64Ux32:
   684  		return rewriteValueRISCV64_OpRsh64Ux32(v)
   685  	case OpRsh64Ux64:
   686  		return rewriteValueRISCV64_OpRsh64Ux64(v)
   687  	case OpRsh64Ux8:
   688  		return rewriteValueRISCV64_OpRsh64Ux8(v)
   689  	case OpRsh64x16:
   690  		return rewriteValueRISCV64_OpRsh64x16(v)
   691  	case OpRsh64x32:
   692  		return rewriteValueRISCV64_OpRsh64x32(v)
   693  	case OpRsh64x64:
   694  		return rewriteValueRISCV64_OpRsh64x64(v)
   695  	case OpRsh64x8:
   696  		return rewriteValueRISCV64_OpRsh64x8(v)
   697  	case OpRsh8Ux16:
   698  		return rewriteValueRISCV64_OpRsh8Ux16(v)
   699  	case OpRsh8Ux32:
   700  		return rewriteValueRISCV64_OpRsh8Ux32(v)
   701  	case OpRsh8Ux64:
   702  		return rewriteValueRISCV64_OpRsh8Ux64(v)
   703  	case OpRsh8Ux8:
   704  		return rewriteValueRISCV64_OpRsh8Ux8(v)
   705  	case OpRsh8x16:
   706  		return rewriteValueRISCV64_OpRsh8x16(v)
   707  	case OpRsh8x32:
   708  		return rewriteValueRISCV64_OpRsh8x32(v)
   709  	case OpRsh8x64:
   710  		return rewriteValueRISCV64_OpRsh8x64(v)
   711  	case OpRsh8x8:
   712  		return rewriteValueRISCV64_OpRsh8x8(v)
   713  	case OpSelect0:
   714  		return rewriteValueRISCV64_OpSelect0(v)
   715  	case OpSelect1:
   716  		return rewriteValueRISCV64_OpSelect1(v)
   717  	case OpSignExt16to32:
   718  		v.Op = OpRISCV64MOVHreg
   719  		return true
   720  	case OpSignExt16to64:
   721  		v.Op = OpRISCV64MOVHreg
   722  		return true
   723  	case OpSignExt32to64:
   724  		v.Op = OpRISCV64MOVWreg
   725  		return true
   726  	case OpSignExt8to16:
   727  		v.Op = OpRISCV64MOVBreg
   728  		return true
   729  	case OpSignExt8to32:
   730  		v.Op = OpRISCV64MOVBreg
   731  		return true
   732  	case OpSignExt8to64:
   733  		v.Op = OpRISCV64MOVBreg
   734  		return true
   735  	case OpSlicemask:
   736  		return rewriteValueRISCV64_OpSlicemask(v)
   737  	case OpSqrt:
   738  		v.Op = OpRISCV64FSQRTD
   739  		return true
   740  	case OpSqrt32:
   741  		v.Op = OpRISCV64FSQRTS
   742  		return true
   743  	case OpStaticCall:
   744  		v.Op = OpRISCV64CALLstatic
   745  		return true
   746  	case OpStore:
   747  		return rewriteValueRISCV64_OpStore(v)
   748  	case OpSub16:
   749  		v.Op = OpRISCV64SUB
   750  		return true
   751  	case OpSub32:
   752  		v.Op = OpRISCV64SUB
   753  		return true
   754  	case OpSub32F:
   755  		v.Op = OpRISCV64FSUBS
   756  		return true
   757  	case OpSub64:
   758  		v.Op = OpRISCV64SUB
   759  		return true
   760  	case OpSub64F:
   761  		v.Op = OpRISCV64FSUBD
   762  		return true
   763  	case OpSub8:
   764  		v.Op = OpRISCV64SUB
   765  		return true
   766  	case OpSubPtr:
   767  		v.Op = OpRISCV64SUB
   768  		return true
   769  	case OpTailCall:
   770  		v.Op = OpRISCV64CALLtail
   771  		return true
   772  	case OpTrunc16to8:
   773  		v.Op = OpCopy
   774  		return true
   775  	case OpTrunc32to16:
   776  		v.Op = OpCopy
   777  		return true
   778  	case OpTrunc32to8:
   779  		v.Op = OpCopy
   780  		return true
   781  	case OpTrunc64to16:
   782  		v.Op = OpCopy
   783  		return true
   784  	case OpTrunc64to32:
   785  		v.Op = OpCopy
   786  		return true
   787  	case OpTrunc64to8:
   788  		v.Op = OpCopy
   789  		return true
   790  	case OpWB:
   791  		v.Op = OpRISCV64LoweredWB
   792  		return true
   793  	case OpXor16:
   794  		v.Op = OpRISCV64XOR
   795  		return true
   796  	case OpXor32:
   797  		v.Op = OpRISCV64XOR
   798  		return true
   799  	case OpXor64:
   800  		v.Op = OpRISCV64XOR
   801  		return true
   802  	case OpXor8:
   803  		v.Op = OpRISCV64XOR
   804  		return true
   805  	case OpZero:
   806  		return rewriteValueRISCV64_OpZero(v)
   807  	case OpZeroExt16to32:
   808  		v.Op = OpRISCV64MOVHUreg
   809  		return true
   810  	case OpZeroExt16to64:
   811  		v.Op = OpRISCV64MOVHUreg
   812  		return true
   813  	case OpZeroExt32to64:
   814  		v.Op = OpRISCV64MOVWUreg
   815  		return true
   816  	case OpZeroExt8to16:
   817  		v.Op = OpRISCV64MOVBUreg
   818  		return true
   819  	case OpZeroExt8to32:
   820  		v.Op = OpRISCV64MOVBUreg
   821  		return true
   822  	case OpZeroExt8to64:
   823  		v.Op = OpRISCV64MOVBUreg
   824  		return true
   825  	}
   826  	return false
   827  }
   828  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   829  	v_0 := v.Args[0]
   830  	// match: (Addr {sym} base)
   831  	// result: (MOVaddr {sym} [0] base)
   832  	for {
   833  		sym := auxToSym(v.Aux)
   834  		base := v_0
   835  		v.reset(OpRISCV64MOVaddr)
   836  		v.AuxInt = int32ToAuxInt(0)
   837  		v.Aux = symToAux(sym)
   838  		v.AddArg(base)
   839  		return true
   840  	}
   841  }
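// rewriteValueRISCV64_OpAtomicAnd8 widens the byte-sized atomic AND to a 32-bit
// atomic AND on the containing aligned word: ANDI [^3] aligns the pointer, and the
// constructed operand carries val in the addressed byte and 0xff in every other byte,
// so the other bytes are preserved. Illustrative example (assuming ptr%4 == 1 and
// val == 0xF0): the operand works out to 0xFFFFF0FF, so only byte 1 of the word is
// ANDed with 0xF0.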
   842  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   843  	v_2 := v.Args[2]
   844  	v_1 := v.Args[1]
   845  	v_0 := v.Args[0]
   846  	b := v.Block
   847  	typ := &b.Func.Config.Types
   848  	// match: (AtomicAnd8 ptr val mem)
   849  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   850  	for {
   851  		ptr := v_0
   852  		val := v_1
   853  		mem := v_2
   854  		v.reset(OpRISCV64LoweredAtomicAnd32)
   855  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   856  		v0.AuxInt = int64ToAuxInt(^3)
   857  		v0.AddArg(ptr)
   858  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   859  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   860  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   861  		v3.AuxInt = int64ToAuxInt(0xff)
   862  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   863  		v4.AddArg(val)
   864  		v3.AddArg(v4)
   865  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   866  		v5.AuxInt = int64ToAuxInt(3)
   867  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   868  		v6.AuxInt = int64ToAuxInt(3)
   869  		v6.AddArg(ptr)
   870  		v5.AddArg(v6)
   871  		v2.AddArg2(v3, v5)
   872  		v1.AddArg(v2)
   873  		v.AddArg3(v0, v1, mem)
   874  		return true
   875  	}
   876  }
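// rewriteValueRISCV64_OpAtomicCompareAndSwap32 sign-extends the expected old value to
// 64 bits, presumably because the word loaded inside LoweredAtomicCas32's reservation
// loop is sign-extended, so the full-register comparison needs a matching pattern.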
   877  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   878  	v_3 := v.Args[3]
   879  	v_2 := v.Args[2]
   880  	v_1 := v.Args[1]
   881  	v_0 := v.Args[0]
   882  	b := v.Block
   883  	typ := &b.Func.Config.Types
   884  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   885  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   886  	for {
   887  		ptr := v_0
   888  		old := v_1
   889  		new := v_2
   890  		mem := v_3
   891  		v.reset(OpRISCV64LoweredAtomicCas32)
   892  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   893  		v0.AddArg(old)
   894  		v.AddArg4(ptr, v0, new, mem)
   895  		return true
   896  	}
   897  }
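// rewriteValueRISCV64_OpAtomicOr8 widens the byte-sized atomic OR to a 32-bit atomic
// OR on the containing aligned word. The zero-extended value is shifted into the
// addressed byte ((ptr&3)*8 bits); the remaining bytes of the operand are zero, so
// ORing leaves them unchanged and no inverted mask is needed.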
   898  func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
   899  	v_2 := v.Args[2]
   900  	v_1 := v.Args[1]
   901  	v_0 := v.Args[0]
   902  	b := v.Block
   903  	typ := &b.Func.Config.Types
   904  	// match: (AtomicOr8 ptr val mem)
   905  	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
   906  	for {
   907  		ptr := v_0
   908  		val := v_1
   909  		mem := v_2
   910  		v.reset(OpRISCV64LoweredAtomicOr32)
   911  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   912  		v0.AuxInt = int64ToAuxInt(^3)
   913  		v0.AddArg(ptr)
   914  		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   915  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   916  		v2.AddArg(val)
   917  		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   918  		v3.AuxInt = int64ToAuxInt(3)
   919  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   920  		v4.AuxInt = int64ToAuxInt(3)
   921  		v4.AddArg(ptr)
   922  		v3.AddArg(v4)
   923  		v1.AddArg2(v2, v3)
   924  		v.AddArg3(v0, v1, mem)
   925  		return true
   926  	}
   927  }
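// rewriteValueRISCV64_OpAvg64u uses the identity (x+y)/2 = x/2 + y/2 + (x&y&1),
// which computes the unsigned average without the 65-bit intermediate sum that
// x+y would require.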
   928  func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
   929  	v_1 := v.Args[1]
   930  	v_0 := v.Args[0]
   931  	b := v.Block
   932  	// match: (Avg64u <t> x y)
   933  	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
   934  	for {
   935  		t := v.Type
   936  		x := v_0
   937  		y := v_1
   938  		v.reset(OpRISCV64ADD)
   939  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
   940  		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   941  		v1.AuxInt = int64ToAuxInt(1)
   942  		v1.AddArg(x)
   943  		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   944  		v2.AuxInt = int64ToAuxInt(1)
   945  		v2.AddArg(y)
   946  		v0.AddArg2(v1, v2)
   947  		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
   948  		v3.AuxInt = int64ToAuxInt(1)
   949  		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
   950  		v4.AddArg2(x, y)
   951  		v3.AddArg(v4)
   952  		v.AddArg2(v0, v3)
   953  		return true
   954  	}
   955  }
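// The BitLen rewrites rely on BitLen(x) = width - CountLeadingZeros(x): BitLen64 and
// BitLen32 subtract CLZ/CLZW from 64 and 32 (a count of the full width for a zero
// input gives a bit length of 0), while BitLen16 and BitLen8 zero-extend and reuse
// BitLen64.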
   956  func rewriteValueRISCV64_OpBitLen16(v *Value) bool {
   957  	v_0 := v.Args[0]
   958  	b := v.Block
   959  	typ := &b.Func.Config.Types
   960  	// match: (BitLen16 x)
   961  	// result: (BitLen64 (ZeroExt16to64 x))
   962  	for {
   963  		x := v_0
   964  		v.reset(OpBitLen64)
   965  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
   966  		v0.AddArg(x)
   967  		v.AddArg(v0)
   968  		return true
   969  	}
   970  }
   971  func rewriteValueRISCV64_OpBitLen32(v *Value) bool {
   972  	v_0 := v.Args[0]
   973  	b := v.Block
   974  	typ := &b.Func.Config.Types
   975  	// match: (BitLen32 <t> x)
   976  	// result: (SUB (MOVDconst [32]) (CLZW <t> x))
   977  	for {
   978  		t := v.Type
   979  		x := v_0
   980  		v.reset(OpRISCV64SUB)
   981  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
   982  		v0.AuxInt = int64ToAuxInt(32)
   983  		v1 := b.NewValue0(v.Pos, OpRISCV64CLZW, t)
   984  		v1.AddArg(x)
   985  		v.AddArg2(v0, v1)
   986  		return true
   987  	}
   988  }
   989  func rewriteValueRISCV64_OpBitLen64(v *Value) bool {
   990  	v_0 := v.Args[0]
   991  	b := v.Block
   992  	typ := &b.Func.Config.Types
   993  	// match: (BitLen64 <t> x)
   994  	// result: (SUB (MOVDconst [64]) (CLZ <t> x))
   995  	for {
   996  		t := v.Type
   997  		x := v_0
   998  		v.reset(OpRISCV64SUB)
   999  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  1000  		v0.AuxInt = int64ToAuxInt(64)
  1001  		v1 := b.NewValue0(v.Pos, OpRISCV64CLZ, t)
  1002  		v1.AddArg(x)
  1003  		v.AddArg2(v0, v1)
  1004  		return true
  1005  	}
  1006  }
  1007  func rewriteValueRISCV64_OpBitLen8(v *Value) bool {
  1008  	v_0 := v.Args[0]
  1009  	b := v.Block
  1010  	typ := &b.Func.Config.Types
  1011  	// match: (BitLen8 x)
  1012  	// result: (BitLen64 (ZeroExt8to64 x))
  1013  	for {
  1014  		x := v_0
  1015  		v.reset(OpBitLen64)
  1016  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1017  		v0.AddArg(x)
  1018  		v.AddArg(v0)
  1019  		return true
  1020  	}
  1021  }
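// Bswap16 and Bswap32 byte-reverse the whole 64-bit register with REV8, which leaves
// the bytes of interest in the top 16 or 32 bits; the SRLI by 48 or 32 shifts them
// back down, yielding the swapped value zero-extended.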
  1022  func rewriteValueRISCV64_OpBswap16(v *Value) bool {
  1023  	v_0 := v.Args[0]
  1024  	b := v.Block
  1025  	// match: (Bswap16 <t> x)
  1026  	// result: (SRLI [48] (REV8 <t> x))
  1027  	for {
  1028  		t := v.Type
  1029  		x := v_0
  1030  		v.reset(OpRISCV64SRLI)
  1031  		v.AuxInt = int64ToAuxInt(48)
  1032  		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
  1033  		v0.AddArg(x)
  1034  		v.AddArg(v0)
  1035  		return true
  1036  	}
  1037  }
  1038  func rewriteValueRISCV64_OpBswap32(v *Value) bool {
  1039  	v_0 := v.Args[0]
  1040  	b := v.Block
  1041  	// match: (Bswap32 <t> x)
  1042  	// result: (SRLI [32] (REV8 <t> x))
  1043  	for {
  1044  		t := v.Type
  1045  		x := v_0
  1046  		v.reset(OpRISCV64SRLI)
  1047  		v.AuxInt = int64ToAuxInt(32)
  1048  		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
  1049  		v0.AddArg(x)
  1050  		v.AddArg(v0)
  1051  		return true
  1052  	}
  1053  }
  1054  func rewriteValueRISCV64_OpConst16(v *Value) bool {
  1055  	// match: (Const16 [val])
  1056  	// result: (MOVDconst [int64(val)])
  1057  	for {
  1058  		val := auxIntToInt16(v.AuxInt)
  1059  		v.reset(OpRISCV64MOVDconst)
  1060  		v.AuxInt = int64ToAuxInt(int64(val))
  1061  		return true
  1062  	}
  1063  }
  1064  func rewriteValueRISCV64_OpConst32(v *Value) bool {
  1065  	// match: (Const32 [val])
  1066  	// result: (MOVDconst [int64(val)])
  1067  	for {
  1068  		val := auxIntToInt32(v.AuxInt)
  1069  		v.reset(OpRISCV64MOVDconst)
  1070  		v.AuxInt = int64ToAuxInt(int64(val))
  1071  		return true
  1072  	}
  1073  }
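// Float constants are materialized as their IEEE-754 bit patterns in an integer
// register (MOVDconst) and then moved bit-for-bit into a floating-point register
// with FMVSX or FMVDX.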
  1074  func rewriteValueRISCV64_OpConst32F(v *Value) bool {
  1075  	b := v.Block
  1076  	typ := &b.Func.Config.Types
  1077  	// match: (Const32F [val])
  1078  	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
  1079  	for {
  1080  		val := auxIntToFloat32(v.AuxInt)
  1081  		v.reset(OpRISCV64FMVSX)
  1082  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  1083  		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
  1084  		v.AddArg(v0)
  1085  		return true
  1086  	}
  1087  }
  1088  func rewriteValueRISCV64_OpConst64(v *Value) bool {
  1089  	// match: (Const64 [val])
  1090  	// result: (MOVDconst [int64(val)])
  1091  	for {
  1092  		val := auxIntToInt64(v.AuxInt)
  1093  		v.reset(OpRISCV64MOVDconst)
  1094  		v.AuxInt = int64ToAuxInt(int64(val))
  1095  		return true
  1096  	}
  1097  }
  1098  func rewriteValueRISCV64_OpConst64F(v *Value) bool {
  1099  	b := v.Block
  1100  	typ := &b.Func.Config.Types
  1101  	// match: (Const64F [val])
  1102  	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
  1103  	for {
  1104  		val := auxIntToFloat64(v.AuxInt)
  1105  		v.reset(OpRISCV64FMVDX)
  1106  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  1107  		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
  1108  		v.AddArg(v0)
  1109  		return true
  1110  	}
  1111  }
  1112  func rewriteValueRISCV64_OpConst8(v *Value) bool {
  1113  	// match: (Const8 [val])
  1114  	// result: (MOVDconst [int64(val)])
  1115  	for {
  1116  		val := auxIntToInt8(v.AuxInt)
  1117  		v.reset(OpRISCV64MOVDconst)
  1118  		v.AuxInt = int64ToAuxInt(int64(val))
  1119  		return true
  1120  	}
  1121  }
  1122  func rewriteValueRISCV64_OpConstBool(v *Value) bool {
  1123  	// match: (ConstBool [val])
  1124  	// result: (MOVDconst [int64(b2i(val))])
  1125  	for {
  1126  		val := auxIntToBool(v.AuxInt)
  1127  		v.reset(OpRISCV64MOVDconst)
  1128  		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
  1129  		return true
  1130  	}
  1131  }
  1132  func rewriteValueRISCV64_OpConstNil(v *Value) bool {
  1133  	// match: (ConstNil)
  1134  	// result: (MOVDconst [0])
  1135  	for {
  1136  		v.reset(OpRISCV64MOVDconst)
  1137  		v.AuxInt = int64ToAuxInt(0)
  1138  		return true
  1139  	}
  1140  }
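// Ctz16 and Ctz8 OR in a constant with only bit 16 (resp. bit 8) set before CTZW, so
// the count is capped at the operand width: a zero input yields 16 (resp. 8), as the
// generic Ctz ops expect. The NonZero variants skip this and map straight to Ctz64.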
  1141  func rewriteValueRISCV64_OpCtz16(v *Value) bool {
  1142  	v_0 := v.Args[0]
  1143  	b := v.Block
  1144  	typ := &b.Func.Config.Types
  1145  	// match: (Ctz16 x)
  1146  	// result: (CTZW (ORI <typ.UInt32> [1<<16] x))
  1147  	for {
  1148  		x := v_0
  1149  		v.reset(OpRISCV64CTZW)
  1150  		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
  1151  		v0.AuxInt = int64ToAuxInt(1 << 16)
  1152  		v0.AddArg(x)
  1153  		v.AddArg(v0)
  1154  		return true
  1155  	}
  1156  }
  1157  func rewriteValueRISCV64_OpCtz8(v *Value) bool {
  1158  	v_0 := v.Args[0]
  1159  	b := v.Block
  1160  	typ := &b.Func.Config.Types
  1161  	// match: (Ctz8 x)
  1162  	// result: (CTZW (ORI <typ.UInt32> [1<<8] x))
  1163  	for {
  1164  		x := v_0
  1165  		v.reset(OpRISCV64CTZW)
  1166  		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
  1167  		v0.AuxInt = int64ToAuxInt(1 << 8)
  1168  		v0.AddArg(x)
  1169  		v.AddArg(v0)
  1170  		return true
  1171  	}
  1172  }
  1173  func rewriteValueRISCV64_OpDiv16(v *Value) bool {
  1174  	v_1 := v.Args[1]
  1175  	v_0 := v.Args[0]
  1176  	b := v.Block
  1177  	typ := &b.Func.Config.Types
  1178  	// match: (Div16 x y [false])
  1179  	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
  1180  	for {
  1181  		if auxIntToBool(v.AuxInt) != false {
  1182  			break
  1183  		}
  1184  		x := v_0
  1185  		y := v_1
  1186  		v.reset(OpRISCV64DIVW)
  1187  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1188  		v0.AddArg(x)
  1189  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1190  		v1.AddArg(y)
  1191  		v.AddArg2(v0, v1)
  1192  		return true
  1193  	}
  1194  	return false
  1195  }
  1196  func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
  1197  	v_1 := v.Args[1]
  1198  	v_0 := v.Args[0]
  1199  	b := v.Block
  1200  	typ := &b.Func.Config.Types
  1201  	// match: (Div16u x y)
  1202  	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
  1203  	for {
  1204  		x := v_0
  1205  		y := v_1
  1206  		v.reset(OpRISCV64DIVUW)
  1207  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1208  		v0.AddArg(x)
  1209  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1210  		v1.AddArg(y)
  1211  		v.AddArg2(v0, v1)
  1212  		return true
  1213  	}
  1214  }
  1215  func rewriteValueRISCV64_OpDiv32(v *Value) bool {
  1216  	v_1 := v.Args[1]
  1217  	v_0 := v.Args[0]
  1218  	// match: (Div32 x y [false])
  1219  	// result: (DIVW x y)
  1220  	for {
  1221  		if auxIntToBool(v.AuxInt) != false {
  1222  			break
  1223  		}
  1224  		x := v_0
  1225  		y := v_1
  1226  		v.reset(OpRISCV64DIVW)
  1227  		v.AddArg2(x, y)
  1228  		return true
  1229  	}
  1230  	return false
  1231  }
  1232  func rewriteValueRISCV64_OpDiv64(v *Value) bool {
  1233  	v_1 := v.Args[1]
  1234  	v_0 := v.Args[0]
  1235  	// match: (Div64 x y [false])
  1236  	// result: (DIV x y)
  1237  	for {
  1238  		if auxIntToBool(v.AuxInt) != false {
  1239  			break
  1240  		}
  1241  		x := v_0
  1242  		y := v_1
  1243  		v.reset(OpRISCV64DIV)
  1244  		v.AddArg2(x, y)
  1245  		return true
  1246  	}
  1247  	return false
  1248  }
  1249  func rewriteValueRISCV64_OpDiv8(v *Value) bool {
  1250  	v_1 := v.Args[1]
  1251  	v_0 := v.Args[0]
  1252  	b := v.Block
  1253  	typ := &b.Func.Config.Types
  1254  	// match: (Div8 x y)
  1255  	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
  1256  	for {
  1257  		x := v_0
  1258  		y := v_1
  1259  		v.reset(OpRISCV64DIVW)
  1260  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1261  		v0.AddArg(x)
  1262  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1263  		v1.AddArg(y)
  1264  		v.AddArg2(v0, v1)
  1265  		return true
  1266  	}
  1267  }
  1268  func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
  1269  	v_1 := v.Args[1]
  1270  	v_0 := v.Args[0]
  1271  	b := v.Block
  1272  	typ := &b.Func.Config.Types
  1273  	// match: (Div8u x y)
  1274  	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
  1275  	for {
  1276  		x := v_0
  1277  		y := v_1
  1278  		v.reset(OpRISCV64DIVUW)
  1279  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1280  		v0.AddArg(x)
  1281  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1282  		v1.AddArg(y)
  1283  		v.AddArg2(v0, v1)
  1284  		return true
  1285  	}
  1286  }
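// Equality is lowered as SEQZ of a subtraction. Sub-word operands are first extended
// to 64 bits so that full registers are compared; Eq32 has signed and unsigned
// variants that differ only in the extension used, presumably so the extension can
// fold away when the operand is already extended that way (either extension
// preserves equality of the low 32 bits).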
  1287  func rewriteValueRISCV64_OpEq16(v *Value) bool {
  1288  	v_1 := v.Args[1]
  1289  	v_0 := v.Args[0]
  1290  	b := v.Block
  1291  	typ := &b.Func.Config.Types
  1292  	// match: (Eq16 x y)
  1293  	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
  1294  	for {
  1295  		x := v_0
  1296  		y := v_1
  1297  		v.reset(OpRISCV64SEQZ)
  1298  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1299  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1300  		v1.AddArg(x)
  1301  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1302  		v2.AddArg(y)
  1303  		v0.AddArg2(v1, v2)
  1304  		v.AddArg(v0)
  1305  		return true
  1306  	}
  1307  }
  1308  func rewriteValueRISCV64_OpEq32(v *Value) bool {
  1309  	v_1 := v.Args[1]
  1310  	v_0 := v.Args[0]
  1311  	b := v.Block
  1312  	typ := &b.Func.Config.Types
  1313  	// match: (Eq32 x y)
  1314  	// cond: x.Type.IsSigned()
  1315  	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
  1316  	for {
  1317  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1318  			x := v_0
  1319  			y := v_1
  1320  			if !(x.Type.IsSigned()) {
  1321  				continue
  1322  			}
  1323  			v.reset(OpRISCV64SEQZ)
  1324  			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1325  			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1326  			v1.AddArg(x)
  1327  			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1328  			v2.AddArg(y)
  1329  			v0.AddArg2(v1, v2)
  1330  			v.AddArg(v0)
  1331  			return true
  1332  		}
  1333  		break
  1334  	}
  1335  	// match: (Eq32 x y)
  1336  	// cond: !x.Type.IsSigned()
  1337  	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
  1338  	for {
  1339  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1340  			x := v_0
  1341  			y := v_1
  1342  			if !(!x.Type.IsSigned()) {
  1343  				continue
  1344  			}
  1345  			v.reset(OpRISCV64SEQZ)
  1346  			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1347  			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1348  			v1.AddArg(x)
  1349  			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1350  			v2.AddArg(y)
  1351  			v0.AddArg2(v1, v2)
  1352  			v.AddArg(v0)
  1353  			return true
  1354  		}
  1355  		break
  1356  	}
  1357  	return false
  1358  }
  1359  func rewriteValueRISCV64_OpEq64(v *Value) bool {
  1360  	v_1 := v.Args[1]
  1361  	v_0 := v.Args[0]
  1362  	b := v.Block
  1363  	// match: (Eq64 x y)
  1364  	// result: (SEQZ (SUB <x.Type> x y))
  1365  	for {
  1366  		x := v_0
  1367  		y := v_1
  1368  		v.reset(OpRISCV64SEQZ)
  1369  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1370  		v0.AddArg2(x, y)
  1371  		v.AddArg(v0)
  1372  		return true
  1373  	}
  1374  }
  1375  func rewriteValueRISCV64_OpEq8(v *Value) bool {
  1376  	v_1 := v.Args[1]
  1377  	v_0 := v.Args[0]
  1378  	b := v.Block
  1379  	typ := &b.Func.Config.Types
  1380  	// match: (Eq8 x y)
  1381  	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
  1382  	for {
  1383  		x := v_0
  1384  		y := v_1
  1385  		v.reset(OpRISCV64SEQZ)
  1386  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1387  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1388  		v1.AddArg(x)
  1389  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1390  		v2.AddArg(y)
  1391  		v0.AddArg2(v1, v2)
  1392  		v.AddArg(v0)
  1393  		return true
  1394  	}
  1395  }
  1396  func rewriteValueRISCV64_OpEqB(v *Value) bool {
  1397  	v_1 := v.Args[1]
  1398  	v_0 := v.Args[0]
  1399  	b := v.Block
  1400  	typ := &b.Func.Config.Types
  1401  	// match: (EqB x y)
  1402  	// result: (SEQZ (SUB <typ.Bool> x y))
  1403  	for {
  1404  		x := v_0
  1405  		y := v_1
  1406  		v.reset(OpRISCV64SEQZ)
  1407  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
  1408  		v0.AddArg2(x, y)
  1409  		v.AddArg(v0)
  1410  		return true
  1411  	}
  1412  }
  1413  func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
  1414  	v_1 := v.Args[1]
  1415  	v_0 := v.Args[0]
  1416  	b := v.Block
  1417  	typ := &b.Func.Config.Types
  1418  	// match: (EqPtr x y)
  1419  	// result: (SEQZ (SUB <typ.Uintptr> x y))
  1420  	for {
  1421  		x := v_0
  1422  		y := v_1
  1423  		v.reset(OpRISCV64SEQZ)
  1424  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
  1425  		v0.AddArg2(x, y)
  1426  		v.AddArg(v0)
  1427  		return true
  1428  	}
  1429  }
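// Hmul32 and Hmul32u obtain the high 32 bits of a 32x32 product by extending both
// operands to 64 bits, doing a full MUL, and shifting the product right by 32
// (arithmetically for the signed case, logically for the unsigned case).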
  1430  func rewriteValueRISCV64_OpHmul32(v *Value) bool {
  1431  	v_1 := v.Args[1]
  1432  	v_0 := v.Args[0]
  1433  	b := v.Block
  1434  	typ := &b.Func.Config.Types
  1435  	// match: (Hmul32 x y)
  1436  	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
  1437  	for {
  1438  		x := v_0
  1439  		y := v_1
  1440  		v.reset(OpRISCV64SRAI)
  1441  		v.AuxInt = int64ToAuxInt(32)
  1442  		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
  1443  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1444  		v1.AddArg(x)
  1445  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1446  		v2.AddArg(y)
  1447  		v0.AddArg2(v1, v2)
  1448  		v.AddArg(v0)
  1449  		return true
  1450  	}
  1451  }
  1452  func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
  1453  	v_1 := v.Args[1]
  1454  	v_0 := v.Args[0]
  1455  	b := v.Block
  1456  	typ := &b.Func.Config.Types
  1457  	// match: (Hmul32u x y)
  1458  	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
  1459  	for {
  1460  		x := v_0
  1461  		y := v_1
  1462  		v.reset(OpRISCV64SRLI)
  1463  		v.AuxInt = int64ToAuxInt(32)
  1464  		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
  1465  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1466  		v1.AddArg(x)
  1467  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1468  		v2.AddArg(y)
  1469  		v0.AddArg2(v1, v2)
  1470  		v.AddArg(v0)
  1471  		return true
  1472  	}
  1473  }
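// The Leq rewrites use x <= y == !(y < x): each lowers to Not of the corresponding
// Less op with the operands swapped, which the Less and Not rules then lower further.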
  1474  func rewriteValueRISCV64_OpLeq16(v *Value) bool {
  1475  	v_1 := v.Args[1]
  1476  	v_0 := v.Args[0]
  1477  	b := v.Block
  1478  	typ := &b.Func.Config.Types
  1479  	// match: (Leq16 x y)
  1480  	// result: (Not (Less16 y x))
  1481  	for {
  1482  		x := v_0
  1483  		y := v_1
  1484  		v.reset(OpNot)
  1485  		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
  1486  		v0.AddArg2(y, x)
  1487  		v.AddArg(v0)
  1488  		return true
  1489  	}
  1490  }
  1491  func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
  1492  	v_1 := v.Args[1]
  1493  	v_0 := v.Args[0]
  1494  	b := v.Block
  1495  	typ := &b.Func.Config.Types
  1496  	// match: (Leq16U x y)
  1497  	// result: (Not (Less16U y x))
  1498  	for {
  1499  		x := v_0
  1500  		y := v_1
  1501  		v.reset(OpNot)
  1502  		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
  1503  		v0.AddArg2(y, x)
  1504  		v.AddArg(v0)
  1505  		return true
  1506  	}
  1507  }
  1508  func rewriteValueRISCV64_OpLeq32(v *Value) bool {
  1509  	v_1 := v.Args[1]
  1510  	v_0 := v.Args[0]
  1511  	b := v.Block
  1512  	typ := &b.Func.Config.Types
  1513  	// match: (Leq32 x y)
  1514  	// result: (Not (Less32 y x))
  1515  	for {
  1516  		x := v_0
  1517  		y := v_1
  1518  		v.reset(OpNot)
  1519  		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
  1520  		v0.AddArg2(y, x)
  1521  		v.AddArg(v0)
  1522  		return true
  1523  	}
  1524  }
  1525  func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
  1526  	v_1 := v.Args[1]
  1527  	v_0 := v.Args[0]
  1528  	b := v.Block
  1529  	typ := &b.Func.Config.Types
  1530  	// match: (Leq32U x y)
  1531  	// result: (Not (Less32U y x))
  1532  	for {
  1533  		x := v_0
  1534  		y := v_1
  1535  		v.reset(OpNot)
  1536  		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
  1537  		v0.AddArg2(y, x)
  1538  		v.AddArg(v0)
  1539  		return true
  1540  	}
  1541  }
  1542  func rewriteValueRISCV64_OpLeq64(v *Value) bool {
  1543  	v_1 := v.Args[1]
  1544  	v_0 := v.Args[0]
  1545  	b := v.Block
  1546  	typ := &b.Func.Config.Types
  1547  	// match: (Leq64 x y)
  1548  	// result: (Not (Less64 y x))
  1549  	for {
  1550  		x := v_0
  1551  		y := v_1
  1552  		v.reset(OpNot)
  1553  		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
  1554  		v0.AddArg2(y, x)
  1555  		v.AddArg(v0)
  1556  		return true
  1557  	}
  1558  }
  1559  func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
  1560  	v_1 := v.Args[1]
  1561  	v_0 := v.Args[0]
  1562  	b := v.Block
  1563  	typ := &b.Func.Config.Types
  1564  	// match: (Leq64U x y)
  1565  	// result: (Not (Less64U y x))
  1566  	for {
  1567  		x := v_0
  1568  		y := v_1
  1569  		v.reset(OpNot)
  1570  		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
  1571  		v0.AddArg2(y, x)
  1572  		v.AddArg(v0)
  1573  		return true
  1574  	}
  1575  }
  1576  func rewriteValueRISCV64_OpLeq8(v *Value) bool {
  1577  	v_1 := v.Args[1]
  1578  	v_0 := v.Args[0]
  1579  	b := v.Block
  1580  	typ := &b.Func.Config.Types
  1581  	// match: (Leq8 x y)
  1582  	// result: (Not (Less8 y x))
  1583  	for {
  1584  		x := v_0
  1585  		y := v_1
  1586  		v.reset(OpNot)
  1587  		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
  1588  		v0.AddArg2(y, x)
  1589  		v.AddArg(v0)
  1590  		return true
  1591  	}
  1592  }
  1593  func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
  1594  	v_1 := v.Args[1]
  1595  	v_0 := v.Args[0]
  1596  	b := v.Block
  1597  	typ := &b.Func.Config.Types
  1598  	// match: (Leq8U x y)
  1599  	// result: (Not (Less8U y x))
  1600  	for {
  1601  		x := v_0
  1602  		y := v_1
  1603  		v.reset(OpNot)
  1604  		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
  1605  		v0.AddArg2(y, x)
  1606  		v.AddArg(v0)
  1607  		return true
  1608  	}
  1609  }
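// Sub-word Less comparisons sign- or zero-extend both operands to 64 bits and use
// SLT or SLTU; Less64 and Less64U map directly to SLT and SLTU in the switch above.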
  1610  func rewriteValueRISCV64_OpLess16(v *Value) bool {
  1611  	v_1 := v.Args[1]
  1612  	v_0 := v.Args[0]
  1613  	b := v.Block
  1614  	typ := &b.Func.Config.Types
  1615  	// match: (Less16 x y)
  1616  	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
  1617  	for {
  1618  		x := v_0
  1619  		y := v_1
  1620  		v.reset(OpRISCV64SLT)
  1621  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1622  		v0.AddArg(x)
  1623  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1624  		v1.AddArg(y)
  1625  		v.AddArg2(v0, v1)
  1626  		return true
  1627  	}
  1628  }
  1629  func rewriteValueRISCV64_OpLess16U(v *Value) bool {
  1630  	v_1 := v.Args[1]
  1631  	v_0 := v.Args[0]
  1632  	b := v.Block
  1633  	typ := &b.Func.Config.Types
  1634  	// match: (Less16U x y)
  1635  	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
  1636  	for {
  1637  		x := v_0
  1638  		y := v_1
  1639  		v.reset(OpRISCV64SLTU)
  1640  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1641  		v0.AddArg(x)
  1642  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1643  		v1.AddArg(y)
  1644  		v.AddArg2(v0, v1)
  1645  		return true
  1646  	}
  1647  }
  1648  func rewriteValueRISCV64_OpLess32(v *Value) bool {
  1649  	v_1 := v.Args[1]
  1650  	v_0 := v.Args[0]
  1651  	b := v.Block
  1652  	typ := &b.Func.Config.Types
  1653  	// match: (Less32 x y)
  1654  	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
  1655  	for {
  1656  		x := v_0
  1657  		y := v_1
  1658  		v.reset(OpRISCV64SLT)
  1659  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1660  		v0.AddArg(x)
  1661  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1662  		v1.AddArg(y)
  1663  		v.AddArg2(v0, v1)
  1664  		return true
  1665  	}
  1666  }
  1667  func rewriteValueRISCV64_OpLess32U(v *Value) bool {
  1668  	v_1 := v.Args[1]
  1669  	v_0 := v.Args[0]
  1670  	b := v.Block
  1671  	typ := &b.Func.Config.Types
  1672  	// match: (Less32U x y)
  1673  	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
  1674  	for {
  1675  		x := v_0
  1676  		y := v_1
  1677  		v.reset(OpRISCV64SLTU)
  1678  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1679  		v0.AddArg(x)
  1680  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1681  		v1.AddArg(y)
  1682  		v.AddArg2(v0, v1)
  1683  		return true
  1684  	}
  1685  }
  1686  func rewriteValueRISCV64_OpLess8(v *Value) bool {
  1687  	v_1 := v.Args[1]
  1688  	v_0 := v.Args[0]
  1689  	b := v.Block
  1690  	typ := &b.Func.Config.Types
  1691  	// match: (Less8 x y)
  1692  	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
  1693  	for {
  1694  		x := v_0
  1695  		y := v_1
  1696  		v.reset(OpRISCV64SLT)
  1697  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1698  		v0.AddArg(x)
  1699  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1700  		v1.AddArg(y)
  1701  		v.AddArg2(v0, v1)
  1702  		return true
  1703  	}
  1704  }
  1705  func rewriteValueRISCV64_OpLess8U(v *Value) bool {
  1706  	v_1 := v.Args[1]
  1707  	v_0 := v.Args[0]
  1708  	b := v.Block
  1709  	typ := &b.Func.Config.Types
  1710  	// match: (Less8U x y)
  1711  	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
  1712  	for {
  1713  		x := v_0
  1714  		y := v_1
  1715  		v.reset(OpRISCV64SLTU)
  1716  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1717  		v0.AddArg(x)
  1718  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1719  		v1.AddArg(y)
  1720  		v.AddArg2(v0, v1)
  1721  		return true
  1722  	}
  1723  }
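// Load selects the memory operation from the type: booleans and unsigned integers use
// the zero-extending loads (MOVBUload, MOVHUload, MOVWUload), signed integers the
// sign-extending ones, 64-bit integers and pointers MOVDload, and floats FMOVWload
// or FMOVDload.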
  1724  func rewriteValueRISCV64_OpLoad(v *Value) bool {
  1725  	v_1 := v.Args[1]
  1726  	v_0 := v.Args[0]
  1727  	// match: (Load <t> ptr mem)
  1728  	// cond: t.IsBoolean()
  1729  	// result: (MOVBUload ptr mem)
  1730  	for {
  1731  		t := v.Type
  1732  		ptr := v_0
  1733  		mem := v_1
  1734  		if !(t.IsBoolean()) {
  1735  			break
  1736  		}
  1737  		v.reset(OpRISCV64MOVBUload)
  1738  		v.AddArg2(ptr, mem)
  1739  		return true
  1740  	}
  1741  	// match: (Load <t> ptr mem)
  1742  	// cond: ( is8BitInt(t) && t.IsSigned())
  1743  	// result: (MOVBload ptr mem)
  1744  	for {
  1745  		t := v.Type
  1746  		ptr := v_0
  1747  		mem := v_1
  1748  		if !(is8BitInt(t) && t.IsSigned()) {
  1749  			break
  1750  		}
  1751  		v.reset(OpRISCV64MOVBload)
  1752  		v.AddArg2(ptr, mem)
  1753  		return true
  1754  	}
  1755  	// match: (Load <t> ptr mem)
  1756  	// cond: ( is8BitInt(t) && !t.IsSigned())
  1757  	// result: (MOVBUload ptr mem)
  1758  	for {
  1759  		t := v.Type
  1760  		ptr := v_0
  1761  		mem := v_1
  1762  		if !(is8BitInt(t) && !t.IsSigned()) {
  1763  			break
  1764  		}
  1765  		v.reset(OpRISCV64MOVBUload)
  1766  		v.AddArg2(ptr, mem)
  1767  		return true
  1768  	}
  1769  	// match: (Load <t> ptr mem)
  1770  	// cond: (is16BitInt(t) && t.IsSigned())
  1771  	// result: (MOVHload ptr mem)
  1772  	for {
  1773  		t := v.Type
  1774  		ptr := v_0
  1775  		mem := v_1
  1776  		if !(is16BitInt(t) && t.IsSigned()) {
  1777  			break
  1778  		}
  1779  		v.reset(OpRISCV64MOVHload)
  1780  		v.AddArg2(ptr, mem)
  1781  		return true
  1782  	}
  1783  	// match: (Load <t> ptr mem)
  1784  	// cond: (is16BitInt(t) && !t.IsSigned())
  1785  	// result: (MOVHUload ptr mem)
  1786  	for {
  1787  		t := v.Type
  1788  		ptr := v_0
  1789  		mem := v_1
  1790  		if !(is16BitInt(t) && !t.IsSigned()) {
  1791  			break
  1792  		}
  1793  		v.reset(OpRISCV64MOVHUload)
  1794  		v.AddArg2(ptr, mem)
  1795  		return true
  1796  	}
  1797  	// match: (Load <t> ptr mem)
  1798  	// cond: (is32BitInt(t) && t.IsSigned())
  1799  	// result: (MOVWload ptr mem)
  1800  	for {
  1801  		t := v.Type
  1802  		ptr := v_0
  1803  		mem := v_1
  1804  		if !(is32BitInt(t) && t.IsSigned()) {
  1805  			break
  1806  		}
  1807  		v.reset(OpRISCV64MOVWload)
  1808  		v.AddArg2(ptr, mem)
  1809  		return true
  1810  	}
  1811  	// match: (Load <t> ptr mem)
  1812  	// cond: (is32BitInt(t) && !t.IsSigned())
  1813  	// result: (MOVWUload ptr mem)
  1814  	for {
  1815  		t := v.Type
  1816  		ptr := v_0
  1817  		mem := v_1
  1818  		if !(is32BitInt(t) && !t.IsSigned()) {
  1819  			break
  1820  		}
  1821  		v.reset(OpRISCV64MOVWUload)
  1822  		v.AddArg2(ptr, mem)
  1823  		return true
  1824  	}
  1825  	// match: (Load <t> ptr mem)
  1826  	// cond: (is64BitInt(t) || isPtr(t))
  1827  	// result: (MOVDload ptr mem)
  1828  	for {
  1829  		t := v.Type
  1830  		ptr := v_0
  1831  		mem := v_1
  1832  		if !(is64BitInt(t) || isPtr(t)) {
  1833  			break
  1834  		}
  1835  		v.reset(OpRISCV64MOVDload)
  1836  		v.AddArg2(ptr, mem)
  1837  		return true
  1838  	}
  1839  	// match: (Load <t> ptr mem)
  1840  	// cond: is32BitFloat(t)
  1841  	// result: (FMOVWload ptr mem)
  1842  	for {
  1843  		t := v.Type
  1844  		ptr := v_0
  1845  		mem := v_1
  1846  		if !(is32BitFloat(t)) {
  1847  			break
  1848  		}
  1849  		v.reset(OpRISCV64FMOVWload)
  1850  		v.AddArg2(ptr, mem)
  1851  		return true
  1852  	}
  1853  	// match: (Load <t> ptr mem)
  1854  	// cond: is64BitFloat(t)
  1855  	// result: (FMOVDload ptr mem)
  1856  	for {
  1857  		t := v.Type
  1858  		ptr := v_0
  1859  		mem := v_1
  1860  		if !(is64BitFloat(t)) {
  1861  			break
  1862  		}
  1863  		v.reset(OpRISCV64FMOVDload)
  1864  		v.AddArg2(ptr, mem)
  1865  		return true
  1866  	}
  1867  	return false
  1868  }
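        // LocalAddr lowering (descriptive note): when the local's element type contains
        // pointers, the address is formed from an SPanchored value that carries both the
        // frame base and the memory state, so the address computation stays ordered with
        // respect to memory operations; pointer-free locals take a plain MOVaddr off the
        // base.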
  1869  func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
  1870  	v_1 := v.Args[1]
  1871  	v_0 := v.Args[0]
  1872  	b := v.Block
  1873  	typ := &b.Func.Config.Types
  1874  	// match: (LocalAddr <t> {sym} base mem)
  1875  	// cond: t.Elem().HasPointers()
  1876  	// result: (MOVaddr {sym} (SPanchored base mem))
  1877  	for {
  1878  		t := v.Type
  1879  		sym := auxToSym(v.Aux)
  1880  		base := v_0
  1881  		mem := v_1
  1882  		if !(t.Elem().HasPointers()) {
  1883  			break
  1884  		}
  1885  		v.reset(OpRISCV64MOVaddr)
  1886  		v.Aux = symToAux(sym)
  1887  		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
  1888  		v0.AddArg2(base, mem)
  1889  		v.AddArg(v0)
  1890  		return true
  1891  	}
  1892  	// match: (LocalAddr <t> {sym} base _)
  1893  	// cond: !t.Elem().HasPointers()
  1894  	// result: (MOVaddr {sym} base)
  1895  	for {
  1896  		t := v.Type
  1897  		sym := auxToSym(v.Aux)
  1898  		base := v_0
  1899  		if !(!t.Elem().HasPointers()) {
  1900  			break
  1901  		}
  1902  		v.reset(OpRISCV64MOVaddr)
  1903  		v.Aux = symToAux(sym)
  1904  		v.AddArg(base)
  1905  		return true
  1906  	}
  1907  	return false
  1908  }
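        // The Lsh* lowerings below all share one shape (descriptive note). When the
        // shift count is not statically known to be in range, the SLL result is ANDed
        // with a mask built from SLTIU [64]: that comparison is 1 when the (extended)
        // count is below 64 and 0 otherwise, so its negation is either all ones (keep
        // the shifted value) or zero, matching Go's rule that shifting by the register
        // width or more yields 0. When shiftIsBounded(v) holds, a bare SLL suffices.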
  1909  func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
  1910  	v_1 := v.Args[1]
  1911  	v_0 := v.Args[0]
  1912  	b := v.Block
  1913  	typ := &b.Func.Config.Types
  1914  	// match: (Lsh16x16 <t> x y)
  1915  	// cond: !shiftIsBounded(v)
  1916  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  1917  	for {
  1918  		t := v.Type
  1919  		x := v_0
  1920  		y := v_1
  1921  		if !(!shiftIsBounded(v)) {
  1922  			break
  1923  		}
  1924  		v.reset(OpRISCV64AND)
  1925  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  1926  		v0.AddArg2(x, y)
  1927  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  1928  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  1929  		v2.AuxInt = int64ToAuxInt(64)
  1930  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1931  		v3.AddArg(y)
  1932  		v2.AddArg(v3)
  1933  		v1.AddArg(v2)
  1934  		v.AddArg2(v0, v1)
  1935  		return true
  1936  	}
  1937  	// match: (Lsh16x16 x y)
  1938  	// cond: shiftIsBounded(v)
  1939  	// result: (SLL x y)
  1940  	for {
  1941  		x := v_0
  1942  		y := v_1
  1943  		if !(shiftIsBounded(v)) {
  1944  			break
  1945  		}
  1946  		v.reset(OpRISCV64SLL)
  1947  		v.AddArg2(x, y)
  1948  		return true
  1949  	}
  1950  	return false
  1951  }
  1952  func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
  1953  	v_1 := v.Args[1]
  1954  	v_0 := v.Args[0]
  1955  	b := v.Block
  1956  	typ := &b.Func.Config.Types
  1957  	// match: (Lsh16x32 <t> x y)
  1958  	// cond: !shiftIsBounded(v)
  1959  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  1960  	for {
  1961  		t := v.Type
  1962  		x := v_0
  1963  		y := v_1
  1964  		if !(!shiftIsBounded(v)) {
  1965  			break
  1966  		}
  1967  		v.reset(OpRISCV64AND)
  1968  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  1969  		v0.AddArg2(x, y)
  1970  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  1971  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  1972  		v2.AuxInt = int64ToAuxInt(64)
  1973  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1974  		v3.AddArg(y)
  1975  		v2.AddArg(v3)
  1976  		v1.AddArg(v2)
  1977  		v.AddArg2(v0, v1)
  1978  		return true
  1979  	}
  1980  	// match: (Lsh16x32 x y)
  1981  	// cond: shiftIsBounded(v)
  1982  	// result: (SLL x y)
  1983  	for {
  1984  		x := v_0
  1985  		y := v_1
  1986  		if !(shiftIsBounded(v)) {
  1987  			break
  1988  		}
  1989  		v.reset(OpRISCV64SLL)
  1990  		v.AddArg2(x, y)
  1991  		return true
  1992  	}
  1993  	return false
  1994  }
  1995  func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
  1996  	v_1 := v.Args[1]
  1997  	v_0 := v.Args[0]
  1998  	b := v.Block
  1999  	// match: (Lsh16x64 <t> x y)
  2000  	// cond: !shiftIsBounded(v)
  2001  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
  2002  	for {
  2003  		t := v.Type
  2004  		x := v_0
  2005  		y := v_1
  2006  		if !(!shiftIsBounded(v)) {
  2007  			break
  2008  		}
  2009  		v.reset(OpRISCV64AND)
  2010  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2011  		v0.AddArg2(x, y)
  2012  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  2013  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2014  		v2.AuxInt = int64ToAuxInt(64)
  2015  		v2.AddArg(y)
  2016  		v1.AddArg(v2)
  2017  		v.AddArg2(v0, v1)
  2018  		return true
  2019  	}
  2020  	// match: (Lsh16x64 x y)
  2021  	// cond: shiftIsBounded(v)
  2022  	// result: (SLL x y)
  2023  	for {
  2024  		x := v_0
  2025  		y := v_1
  2026  		if !(shiftIsBounded(v)) {
  2027  			break
  2028  		}
  2029  		v.reset(OpRISCV64SLL)
  2030  		v.AddArg2(x, y)
  2031  		return true
  2032  	}
  2033  	return false
  2034  }
  2035  func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
  2036  	v_1 := v.Args[1]
  2037  	v_0 := v.Args[0]
  2038  	b := v.Block
  2039  	typ := &b.Func.Config.Types
  2040  	// match: (Lsh16x8 <t> x y)
  2041  	// cond: !shiftIsBounded(v)
  2042  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2043  	for {
  2044  		t := v.Type
  2045  		x := v_0
  2046  		y := v_1
  2047  		if !(!shiftIsBounded(v)) {
  2048  			break
  2049  		}
  2050  		v.reset(OpRISCV64AND)
  2051  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2052  		v0.AddArg2(x, y)
  2053  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  2054  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2055  		v2.AuxInt = int64ToAuxInt(64)
  2056  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2057  		v3.AddArg(y)
  2058  		v2.AddArg(v3)
  2059  		v1.AddArg(v2)
  2060  		v.AddArg2(v0, v1)
  2061  		return true
  2062  	}
  2063  	// match: (Lsh16x8 x y)
  2064  	// cond: shiftIsBounded(v)
  2065  	// result: (SLL x y)
  2066  	for {
  2067  		x := v_0
  2068  		y := v_1
  2069  		if !(shiftIsBounded(v)) {
  2070  			break
  2071  		}
  2072  		v.reset(OpRISCV64SLL)
  2073  		v.AddArg2(x, y)
  2074  		return true
  2075  	}
  2076  	return false
  2077  }
  2078  func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
  2079  	v_1 := v.Args[1]
  2080  	v_0 := v.Args[0]
  2081  	b := v.Block
  2082  	typ := &b.Func.Config.Types
  2083  	// match: (Lsh32x16 <t> x y)
  2084  	// cond: !shiftIsBounded(v)
  2085  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  2086  	for {
  2087  		t := v.Type
  2088  		x := v_0
  2089  		y := v_1
  2090  		if !(!shiftIsBounded(v)) {
  2091  			break
  2092  		}
  2093  		v.reset(OpRISCV64AND)
  2094  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2095  		v0.AddArg2(x, y)
  2096  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2097  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2098  		v2.AuxInt = int64ToAuxInt(64)
  2099  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2100  		v3.AddArg(y)
  2101  		v2.AddArg(v3)
  2102  		v1.AddArg(v2)
  2103  		v.AddArg2(v0, v1)
  2104  		return true
  2105  	}
  2106  	// match: (Lsh32x16 x y)
  2107  	// cond: shiftIsBounded(v)
  2108  	// result: (SLL x y)
  2109  	for {
  2110  		x := v_0
  2111  		y := v_1
  2112  		if !(shiftIsBounded(v)) {
  2113  			break
  2114  		}
  2115  		v.reset(OpRISCV64SLL)
  2116  		v.AddArg2(x, y)
  2117  		return true
  2118  	}
  2119  	return false
  2120  }
  2121  func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
  2122  	v_1 := v.Args[1]
  2123  	v_0 := v.Args[0]
  2124  	b := v.Block
  2125  	typ := &b.Func.Config.Types
  2126  	// match: (Lsh32x32 <t> x y)
  2127  	// cond: !shiftIsBounded(v)
  2128  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  2129  	for {
  2130  		t := v.Type
  2131  		x := v_0
  2132  		y := v_1
  2133  		if !(!shiftIsBounded(v)) {
  2134  			break
  2135  		}
  2136  		v.reset(OpRISCV64AND)
  2137  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2138  		v0.AddArg2(x, y)
  2139  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2140  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2141  		v2.AuxInt = int64ToAuxInt(64)
  2142  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2143  		v3.AddArg(y)
  2144  		v2.AddArg(v3)
  2145  		v1.AddArg(v2)
  2146  		v.AddArg2(v0, v1)
  2147  		return true
  2148  	}
  2149  	// match: (Lsh32x32 x y)
  2150  	// cond: shiftIsBounded(v)
  2151  	// result: (SLL x y)
  2152  	for {
  2153  		x := v_0
  2154  		y := v_1
  2155  		if !(shiftIsBounded(v)) {
  2156  			break
  2157  		}
  2158  		v.reset(OpRISCV64SLL)
  2159  		v.AddArg2(x, y)
  2160  		return true
  2161  	}
  2162  	return false
  2163  }
  2164  func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
  2165  	v_1 := v.Args[1]
  2166  	v_0 := v.Args[0]
  2167  	b := v.Block
  2168  	// match: (Lsh32x64 <t> x y)
  2169  	// cond: !shiftIsBounded(v)
  2170  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
  2171  	for {
  2172  		t := v.Type
  2173  		x := v_0
  2174  		y := v_1
  2175  		if !(!shiftIsBounded(v)) {
  2176  			break
  2177  		}
  2178  		v.reset(OpRISCV64AND)
  2179  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2180  		v0.AddArg2(x, y)
  2181  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2182  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2183  		v2.AuxInt = int64ToAuxInt(64)
  2184  		v2.AddArg(y)
  2185  		v1.AddArg(v2)
  2186  		v.AddArg2(v0, v1)
  2187  		return true
  2188  	}
  2189  	// match: (Lsh32x64 x y)
  2190  	// cond: shiftIsBounded(v)
  2191  	// result: (SLL x y)
  2192  	for {
  2193  		x := v_0
  2194  		y := v_1
  2195  		if !(shiftIsBounded(v)) {
  2196  			break
  2197  		}
  2198  		v.reset(OpRISCV64SLL)
  2199  		v.AddArg2(x, y)
  2200  		return true
  2201  	}
  2202  	return false
  2203  }
  2204  func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
  2205  	v_1 := v.Args[1]
  2206  	v_0 := v.Args[0]
  2207  	b := v.Block
  2208  	typ := &b.Func.Config.Types
  2209  	// match: (Lsh32x8 <t> x y)
  2210  	// cond: !shiftIsBounded(v)
  2211  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2212  	for {
  2213  		t := v.Type
  2214  		x := v_0
  2215  		y := v_1
  2216  		if !(!shiftIsBounded(v)) {
  2217  			break
  2218  		}
  2219  		v.reset(OpRISCV64AND)
  2220  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2221  		v0.AddArg2(x, y)
  2222  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2223  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2224  		v2.AuxInt = int64ToAuxInt(64)
  2225  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2226  		v3.AddArg(y)
  2227  		v2.AddArg(v3)
  2228  		v1.AddArg(v2)
  2229  		v.AddArg2(v0, v1)
  2230  		return true
  2231  	}
  2232  	// match: (Lsh32x8 x y)
  2233  	// cond: shiftIsBounded(v)
  2234  	// result: (SLL x y)
  2235  	for {
  2236  		x := v_0
  2237  		y := v_1
  2238  		if !(shiftIsBounded(v)) {
  2239  			break
  2240  		}
  2241  		v.reset(OpRISCV64SLL)
  2242  		v.AddArg2(x, y)
  2243  		return true
  2244  	}
  2245  	return false
  2246  }
  2247  func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
  2248  	v_1 := v.Args[1]
  2249  	v_0 := v.Args[0]
  2250  	b := v.Block
  2251  	typ := &b.Func.Config.Types
  2252  	// match: (Lsh64x16 <t> x y)
  2253  	// cond: !shiftIsBounded(v)
  2254  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  2255  	for {
  2256  		t := v.Type
  2257  		x := v_0
  2258  		y := v_1
  2259  		if !(!shiftIsBounded(v)) {
  2260  			break
  2261  		}
  2262  		v.reset(OpRISCV64AND)
  2263  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2264  		v0.AddArg2(x, y)
  2265  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2266  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2267  		v2.AuxInt = int64ToAuxInt(64)
  2268  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2269  		v3.AddArg(y)
  2270  		v2.AddArg(v3)
  2271  		v1.AddArg(v2)
  2272  		v.AddArg2(v0, v1)
  2273  		return true
  2274  	}
  2275  	// match: (Lsh64x16 x y)
  2276  	// cond: shiftIsBounded(v)
  2277  	// result: (SLL x y)
  2278  	for {
  2279  		x := v_0
  2280  		y := v_1
  2281  		if !(shiftIsBounded(v)) {
  2282  			break
  2283  		}
  2284  		v.reset(OpRISCV64SLL)
  2285  		v.AddArg2(x, y)
  2286  		return true
  2287  	}
  2288  	return false
  2289  }
  2290  func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
  2291  	v_1 := v.Args[1]
  2292  	v_0 := v.Args[0]
  2293  	b := v.Block
  2294  	typ := &b.Func.Config.Types
  2295  	// match: (Lsh64x32 <t> x y)
  2296  	// cond: !shiftIsBounded(v)
  2297  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  2298  	for {
  2299  		t := v.Type
  2300  		x := v_0
  2301  		y := v_1
  2302  		if !(!shiftIsBounded(v)) {
  2303  			break
  2304  		}
  2305  		v.reset(OpRISCV64AND)
  2306  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2307  		v0.AddArg2(x, y)
  2308  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2309  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2310  		v2.AuxInt = int64ToAuxInt(64)
  2311  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2312  		v3.AddArg(y)
  2313  		v2.AddArg(v3)
  2314  		v1.AddArg(v2)
  2315  		v.AddArg2(v0, v1)
  2316  		return true
  2317  	}
  2318  	// match: (Lsh64x32 x y)
  2319  	// cond: shiftIsBounded(v)
  2320  	// result: (SLL x y)
  2321  	for {
  2322  		x := v_0
  2323  		y := v_1
  2324  		if !(shiftIsBounded(v)) {
  2325  			break
  2326  		}
  2327  		v.reset(OpRISCV64SLL)
  2328  		v.AddArg2(x, y)
  2329  		return true
  2330  	}
  2331  	return false
  2332  }
  2333  func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
  2334  	v_1 := v.Args[1]
  2335  	v_0 := v.Args[0]
  2336  	b := v.Block
  2337  	// match: (Lsh64x64 <t> x y)
  2338  	// cond: !shiftIsBounded(v)
  2339  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
  2340  	for {
  2341  		t := v.Type
  2342  		x := v_0
  2343  		y := v_1
  2344  		if !(!shiftIsBounded(v)) {
  2345  			break
  2346  		}
  2347  		v.reset(OpRISCV64AND)
  2348  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2349  		v0.AddArg2(x, y)
  2350  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2351  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2352  		v2.AuxInt = int64ToAuxInt(64)
  2353  		v2.AddArg(y)
  2354  		v1.AddArg(v2)
  2355  		v.AddArg2(v0, v1)
  2356  		return true
  2357  	}
  2358  	// match: (Lsh64x64 x y)
  2359  	// cond: shiftIsBounded(v)
  2360  	// result: (SLL x y)
  2361  	for {
  2362  		x := v_0
  2363  		y := v_1
  2364  		if !(shiftIsBounded(v)) {
  2365  			break
  2366  		}
  2367  		v.reset(OpRISCV64SLL)
  2368  		v.AddArg2(x, y)
  2369  		return true
  2370  	}
  2371  	return false
  2372  }
  2373  func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
  2374  	v_1 := v.Args[1]
  2375  	v_0 := v.Args[0]
  2376  	b := v.Block
  2377  	typ := &b.Func.Config.Types
  2378  	// match: (Lsh64x8 <t> x y)
  2379  	// cond: !shiftIsBounded(v)
  2380  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2381  	for {
  2382  		t := v.Type
  2383  		x := v_0
  2384  		y := v_1
  2385  		if !(!shiftIsBounded(v)) {
  2386  			break
  2387  		}
  2388  		v.reset(OpRISCV64AND)
  2389  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2390  		v0.AddArg2(x, y)
  2391  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2392  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2393  		v2.AuxInt = int64ToAuxInt(64)
  2394  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2395  		v3.AddArg(y)
  2396  		v2.AddArg(v3)
  2397  		v1.AddArg(v2)
  2398  		v.AddArg2(v0, v1)
  2399  		return true
  2400  	}
  2401  	// match: (Lsh64x8 x y)
  2402  	// cond: shiftIsBounded(v)
  2403  	// result: (SLL x y)
  2404  	for {
  2405  		x := v_0
  2406  		y := v_1
  2407  		if !(shiftIsBounded(v)) {
  2408  			break
  2409  		}
  2410  		v.reset(OpRISCV64SLL)
  2411  		v.AddArg2(x, y)
  2412  		return true
  2413  	}
  2414  	return false
  2415  }
  2416  func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
  2417  	v_1 := v.Args[1]
  2418  	v_0 := v.Args[0]
  2419  	b := v.Block
  2420  	typ := &b.Func.Config.Types
  2421  	// match: (Lsh8x16 <t> x y)
  2422  	// cond: !shiftIsBounded(v)
  2423  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  2424  	for {
  2425  		t := v.Type
  2426  		x := v_0
  2427  		y := v_1
  2428  		if !(!shiftIsBounded(v)) {
  2429  			break
  2430  		}
  2431  		v.reset(OpRISCV64AND)
  2432  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2433  		v0.AddArg2(x, y)
  2434  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2435  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2436  		v2.AuxInt = int64ToAuxInt(64)
  2437  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2438  		v3.AddArg(y)
  2439  		v2.AddArg(v3)
  2440  		v1.AddArg(v2)
  2441  		v.AddArg2(v0, v1)
  2442  		return true
  2443  	}
  2444  	// match: (Lsh8x16 x y)
  2445  	// cond: shiftIsBounded(v)
  2446  	// result: (SLL x y)
  2447  	for {
  2448  		x := v_0
  2449  		y := v_1
  2450  		if !(shiftIsBounded(v)) {
  2451  			break
  2452  		}
  2453  		v.reset(OpRISCV64SLL)
  2454  		v.AddArg2(x, y)
  2455  		return true
  2456  	}
  2457  	return false
  2458  }
  2459  func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
  2460  	v_1 := v.Args[1]
  2461  	v_0 := v.Args[0]
  2462  	b := v.Block
  2463  	typ := &b.Func.Config.Types
  2464  	// match: (Lsh8x32 <t> x y)
  2465  	// cond: !shiftIsBounded(v)
  2466  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  2467  	for {
  2468  		t := v.Type
  2469  		x := v_0
  2470  		y := v_1
  2471  		if !(!shiftIsBounded(v)) {
  2472  			break
  2473  		}
  2474  		v.reset(OpRISCV64AND)
  2475  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2476  		v0.AddArg2(x, y)
  2477  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2478  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2479  		v2.AuxInt = int64ToAuxInt(64)
  2480  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2481  		v3.AddArg(y)
  2482  		v2.AddArg(v3)
  2483  		v1.AddArg(v2)
  2484  		v.AddArg2(v0, v1)
  2485  		return true
  2486  	}
  2487  	// match: (Lsh8x32 x y)
  2488  	// cond: shiftIsBounded(v)
  2489  	// result: (SLL x y)
  2490  	for {
  2491  		x := v_0
  2492  		y := v_1
  2493  		if !(shiftIsBounded(v)) {
  2494  			break
  2495  		}
  2496  		v.reset(OpRISCV64SLL)
  2497  		v.AddArg2(x, y)
  2498  		return true
  2499  	}
  2500  	return false
  2501  }
  2502  func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
  2503  	v_1 := v.Args[1]
  2504  	v_0 := v.Args[0]
  2505  	b := v.Block
  2506  	// match: (Lsh8x64 <t> x y)
  2507  	// cond: !shiftIsBounded(v)
  2508  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
  2509  	for {
  2510  		t := v.Type
  2511  		x := v_0
  2512  		y := v_1
  2513  		if !(!shiftIsBounded(v)) {
  2514  			break
  2515  		}
  2516  		v.reset(OpRISCV64AND)
  2517  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2518  		v0.AddArg2(x, y)
  2519  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2520  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2521  		v2.AuxInt = int64ToAuxInt(64)
  2522  		v2.AddArg(y)
  2523  		v1.AddArg(v2)
  2524  		v.AddArg2(v0, v1)
  2525  		return true
  2526  	}
  2527  	// match: (Lsh8x64 x y)
  2528  	// cond: shiftIsBounded(v)
  2529  	// result: (SLL x y)
  2530  	for {
  2531  		x := v_0
  2532  		y := v_1
  2533  		if !(shiftIsBounded(v)) {
  2534  			break
  2535  		}
  2536  		v.reset(OpRISCV64SLL)
  2537  		v.AddArg2(x, y)
  2538  		return true
  2539  	}
  2540  	return false
  2541  }
  2542  func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
  2543  	v_1 := v.Args[1]
  2544  	v_0 := v.Args[0]
  2545  	b := v.Block
  2546  	typ := &b.Func.Config.Types
  2547  	// match: (Lsh8x8 <t> x y)
  2548  	// cond: !shiftIsBounded(v)
  2549  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2550  	for {
  2551  		t := v.Type
  2552  		x := v_0
  2553  		y := v_1
  2554  		if !(!shiftIsBounded(v)) {
  2555  			break
  2556  		}
  2557  		v.reset(OpRISCV64AND)
  2558  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2559  		v0.AddArg2(x, y)
  2560  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2561  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2562  		v2.AuxInt = int64ToAuxInt(64)
  2563  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2564  		v3.AddArg(y)
  2565  		v2.AddArg(v3)
  2566  		v1.AddArg(v2)
  2567  		v.AddArg2(v0, v1)
  2568  		return true
  2569  	}
  2570  	// match: (Lsh8x8 x y)
  2571  	// cond: shiftIsBounded(v)
  2572  	// result: (SLL x y)
  2573  	for {
  2574  		x := v_0
  2575  		y := v_1
  2576  		if !(shiftIsBounded(v)) {
  2577  			break
  2578  		}
  2579  		v.reset(OpRISCV64SLL)
  2580  		v.AddArg2(x, y)
  2581  		return true
  2582  	}
  2583  	return false
  2584  }
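        // Max64/Max64u/Min64/Min64u (descriptive note): these only lower to the
        // MAX/MAXU/MIN/MINU instructions when buildcfg.GORISCV64 >= 22, i.e. when the
        // selected profile (rva22u64 or later) guarantees the Zbb extension that
        // provides them.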
  2585  func rewriteValueRISCV64_OpMax64(v *Value) bool {
  2586  	v_1 := v.Args[1]
  2587  	v_0 := v.Args[0]
  2588  	// match: (Max64 x y)
  2589  	// cond: buildcfg.GORISCV64 >= 22
  2590  	// result: (MAX x y)
  2591  	for {
  2592  		x := v_0
  2593  		y := v_1
  2594  		if !(buildcfg.GORISCV64 >= 22) {
  2595  			break
  2596  		}
  2597  		v.reset(OpRISCV64MAX)
  2598  		v.AddArg2(x, y)
  2599  		return true
  2600  	}
  2601  	return false
  2602  }
  2603  func rewriteValueRISCV64_OpMax64u(v *Value) bool {
  2604  	v_1 := v.Args[1]
  2605  	v_0 := v.Args[0]
  2606  	// match: (Max64u x y)
  2607  	// cond: buildcfg.GORISCV64 >= 22
  2608  	// result: (MAXU x y)
  2609  	for {
  2610  		x := v_0
  2611  		y := v_1
  2612  		if !(buildcfg.GORISCV64 >= 22) {
  2613  			break
  2614  		}
  2615  		v.reset(OpRISCV64MAXU)
  2616  		v.AddArg2(x, y)
  2617  		return true
  2618  	}
  2619  	return false
  2620  }
  2621  func rewriteValueRISCV64_OpMin64(v *Value) bool {
  2622  	v_1 := v.Args[1]
  2623  	v_0 := v.Args[0]
  2624  	// match: (Min64 x y)
  2625  	// cond: buildcfg.GORISCV64 >= 22
  2626  	// result: (MIN x y)
  2627  	for {
  2628  		x := v_0
  2629  		y := v_1
  2630  		if !(buildcfg.GORISCV64 >= 22) {
  2631  			break
  2632  		}
  2633  		v.reset(OpRISCV64MIN)
  2634  		v.AddArg2(x, y)
  2635  		return true
  2636  	}
  2637  	return false
  2638  }
  2639  func rewriteValueRISCV64_OpMin64u(v *Value) bool {
  2640  	v_1 := v.Args[1]
  2641  	v_0 := v.Args[0]
  2642  	// match: (Min64u x y)
  2643  	// cond: buildcfg.GORISCV64 >= 22
  2644  	// result: (MINU x y)
  2645  	for {
  2646  		x := v_0
  2647  		y := v_1
  2648  		if !(buildcfg.GORISCV64 >= 22) {
  2649  			break
  2650  		}
  2651  		v.reset(OpRISCV64MINU)
  2652  		v.AddArg2(x, y)
  2653  		return true
  2654  	}
  2655  	return false
  2656  }
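        // Mod lowering (descriptive note): sub-word operands are first extended to
        // 32 bits (sign-extended for the signed forms, zero-extended for the unsigned
        // ones) and then reduced with REMW/REMUW; the 64-bit signed remainder uses REM
        // directly. The signed 16/32/64-bit rules additionally require the op's boolean
        // AuxInt to be false.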
  2657  func rewriteValueRISCV64_OpMod16(v *Value) bool {
  2658  	v_1 := v.Args[1]
  2659  	v_0 := v.Args[0]
  2660  	b := v.Block
  2661  	typ := &b.Func.Config.Types
  2662  	// match: (Mod16 x y [false])
  2663  	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
  2664  	for {
  2665  		if auxIntToBool(v.AuxInt) != false {
  2666  			break
  2667  		}
  2668  		x := v_0
  2669  		y := v_1
  2670  		v.reset(OpRISCV64REMW)
  2671  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2672  		v0.AddArg(x)
  2673  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2674  		v1.AddArg(y)
  2675  		v.AddArg2(v0, v1)
  2676  		return true
  2677  	}
  2678  	return false
  2679  }
  2680  func rewriteValueRISCV64_OpMod16u(v *Value) bool {
  2681  	v_1 := v.Args[1]
  2682  	v_0 := v.Args[0]
  2683  	b := v.Block
  2684  	typ := &b.Func.Config.Types
  2685  	// match: (Mod16u x y)
  2686  	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
  2687  	for {
  2688  		x := v_0
  2689  		y := v_1
  2690  		v.reset(OpRISCV64REMUW)
  2691  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2692  		v0.AddArg(x)
  2693  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2694  		v1.AddArg(y)
  2695  		v.AddArg2(v0, v1)
  2696  		return true
  2697  	}
  2698  }
  2699  func rewriteValueRISCV64_OpMod32(v *Value) bool {
  2700  	v_1 := v.Args[1]
  2701  	v_0 := v.Args[0]
  2702  	// match: (Mod32 x y [false])
  2703  	// result: (REMW x y)
  2704  	for {
  2705  		if auxIntToBool(v.AuxInt) != false {
  2706  			break
  2707  		}
  2708  		x := v_0
  2709  		y := v_1
  2710  		v.reset(OpRISCV64REMW)
  2711  		v.AddArg2(x, y)
  2712  		return true
  2713  	}
  2714  	return false
  2715  }
  2716  func rewriteValueRISCV64_OpMod64(v *Value) bool {
  2717  	v_1 := v.Args[1]
  2718  	v_0 := v.Args[0]
  2719  	// match: (Mod64 x y [false])
  2720  	// result: (REM x y)
  2721  	for {
  2722  		if auxIntToBool(v.AuxInt) != false {
  2723  			break
  2724  		}
  2725  		x := v_0
  2726  		y := v_1
  2727  		v.reset(OpRISCV64REM)
  2728  		v.AddArg2(x, y)
  2729  		return true
  2730  	}
  2731  	return false
  2732  }
  2733  func rewriteValueRISCV64_OpMod8(v *Value) bool {
  2734  	v_1 := v.Args[1]
  2735  	v_0 := v.Args[0]
  2736  	b := v.Block
  2737  	typ := &b.Func.Config.Types
  2738  	// match: (Mod8 x y)
  2739  	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
  2740  	for {
  2741  		x := v_0
  2742  		y := v_1
  2743  		v.reset(OpRISCV64REMW)
  2744  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2745  		v0.AddArg(x)
  2746  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2747  		v1.AddArg(y)
  2748  		v.AddArg2(v0, v1)
  2749  		return true
  2750  	}
  2751  }
  2752  func rewriteValueRISCV64_OpMod8u(v *Value) bool {
  2753  	v_1 := v.Args[1]
  2754  	v_0 := v.Args[0]
  2755  	b := v.Block
  2756  	typ := &b.Func.Config.Types
  2757  	// match: (Mod8u x y)
  2758  	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
  2759  	for {
  2760  		x := v_0
  2761  		y := v_1
  2762  		v.reset(OpRISCV64REMUW)
  2763  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2764  		v0.AddArg(x)
  2765  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2766  		v1.AddArg(y)
  2767  		v.AddArg2(v0, v1)
  2768  		return true
  2769  	}
  2770  }
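        // Move lowering strategy (descriptive note): small fixed sizes expand into
        // explicit load/store pairs, using the widest access the type's alignment
        // permits and falling back to byte-by-byte copies. Aligned multiples of 8 up to
        // 8*128 bytes become a DUFFCOPY whose AuxInt, 16*(128 - s/8), is evidently the
        // entry offset into the Duff's-device copy routine (16 bytes of code per 8-byte
        // element). Everything else falls back to LoweredMove, where the extra ADDI
        // argument marks the address of the final element (src + s - moveSize), serving
        // as the copy loop's end marker.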
  2771  func rewriteValueRISCV64_OpMove(v *Value) bool {
  2772  	v_2 := v.Args[2]
  2773  	v_1 := v.Args[1]
  2774  	v_0 := v.Args[0]
  2775  	b := v.Block
  2776  	config := b.Func.Config
  2777  	typ := &b.Func.Config.Types
  2778  	// match: (Move [0] _ _ mem)
  2779  	// result: mem
  2780  	for {
  2781  		if auxIntToInt64(v.AuxInt) != 0 {
  2782  			break
  2783  		}
  2784  		mem := v_2
  2785  		v.copyOf(mem)
  2786  		return true
  2787  	}
  2788  	// match: (Move [1] dst src mem)
  2789  	// result: (MOVBstore dst (MOVBload src mem) mem)
  2790  	for {
  2791  		if auxIntToInt64(v.AuxInt) != 1 {
  2792  			break
  2793  		}
  2794  		dst := v_0
  2795  		src := v_1
  2796  		mem := v_2
  2797  		v.reset(OpRISCV64MOVBstore)
  2798  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2799  		v0.AddArg2(src, mem)
  2800  		v.AddArg3(dst, v0, mem)
  2801  		return true
  2802  	}
  2803  	// match: (Move [2] {t} dst src mem)
  2804  	// cond: t.Alignment()%2 == 0
  2805  	// result: (MOVHstore dst (MOVHload src mem) mem)
  2806  	for {
  2807  		if auxIntToInt64(v.AuxInt) != 2 {
  2808  			break
  2809  		}
  2810  		t := auxToType(v.Aux)
  2811  		dst := v_0
  2812  		src := v_1
  2813  		mem := v_2
  2814  		if !(t.Alignment()%2 == 0) {
  2815  			break
  2816  		}
  2817  		v.reset(OpRISCV64MOVHstore)
  2818  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2819  		v0.AddArg2(src, mem)
  2820  		v.AddArg3(dst, v0, mem)
  2821  		return true
  2822  	}
  2823  	// match: (Move [2] dst src mem)
  2824  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
  2825  	for {
  2826  		if auxIntToInt64(v.AuxInt) != 2 {
  2827  			break
  2828  		}
  2829  		dst := v_0
  2830  		src := v_1
  2831  		mem := v_2
  2832  		v.reset(OpRISCV64MOVBstore)
  2833  		v.AuxInt = int32ToAuxInt(1)
  2834  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2835  		v0.AuxInt = int32ToAuxInt(1)
  2836  		v0.AddArg2(src, mem)
  2837  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2838  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2839  		v2.AddArg2(src, mem)
  2840  		v1.AddArg3(dst, v2, mem)
  2841  		v.AddArg3(dst, v0, v1)
  2842  		return true
  2843  	}
  2844  	// match: (Move [4] {t} dst src mem)
  2845  	// cond: t.Alignment()%4 == 0
  2846  	// result: (MOVWstore dst (MOVWload src mem) mem)
  2847  	for {
  2848  		if auxIntToInt64(v.AuxInt) != 4 {
  2849  			break
  2850  		}
  2851  		t := auxToType(v.Aux)
  2852  		dst := v_0
  2853  		src := v_1
  2854  		mem := v_2
  2855  		if !(t.Alignment()%4 == 0) {
  2856  			break
  2857  		}
  2858  		v.reset(OpRISCV64MOVWstore)
  2859  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2860  		v0.AddArg2(src, mem)
  2861  		v.AddArg3(dst, v0, mem)
  2862  		return true
  2863  	}
  2864  	// match: (Move [4] {t} dst src mem)
  2865  	// cond: t.Alignment()%2 == 0
  2866  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
  2867  	for {
  2868  		if auxIntToInt64(v.AuxInt) != 4 {
  2869  			break
  2870  		}
  2871  		t := auxToType(v.Aux)
  2872  		dst := v_0
  2873  		src := v_1
  2874  		mem := v_2
  2875  		if !(t.Alignment()%2 == 0) {
  2876  			break
  2877  		}
  2878  		v.reset(OpRISCV64MOVHstore)
  2879  		v.AuxInt = int32ToAuxInt(2)
  2880  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2881  		v0.AuxInt = int32ToAuxInt(2)
  2882  		v0.AddArg2(src, mem)
  2883  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2884  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2885  		v2.AddArg2(src, mem)
  2886  		v1.AddArg3(dst, v2, mem)
  2887  		v.AddArg3(dst, v0, v1)
  2888  		return true
  2889  	}
  2890  	// match: (Move [4] dst src mem)
  2891  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
  2892  	for {
  2893  		if auxIntToInt64(v.AuxInt) != 4 {
  2894  			break
  2895  		}
  2896  		dst := v_0
  2897  		src := v_1
  2898  		mem := v_2
  2899  		v.reset(OpRISCV64MOVBstore)
  2900  		v.AuxInt = int32ToAuxInt(3)
  2901  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2902  		v0.AuxInt = int32ToAuxInt(3)
  2903  		v0.AddArg2(src, mem)
  2904  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2905  		v1.AuxInt = int32ToAuxInt(2)
  2906  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2907  		v2.AuxInt = int32ToAuxInt(2)
  2908  		v2.AddArg2(src, mem)
  2909  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2910  		v3.AuxInt = int32ToAuxInt(1)
  2911  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2912  		v4.AuxInt = int32ToAuxInt(1)
  2913  		v4.AddArg2(src, mem)
  2914  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2915  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2916  		v6.AddArg2(src, mem)
  2917  		v5.AddArg3(dst, v6, mem)
  2918  		v3.AddArg3(dst, v4, v5)
  2919  		v1.AddArg3(dst, v2, v3)
  2920  		v.AddArg3(dst, v0, v1)
  2921  		return true
  2922  	}
  2923  	// match: (Move [8] {t} dst src mem)
  2924  	// cond: t.Alignment()%8 == 0
  2925  	// result: (MOVDstore dst (MOVDload src mem) mem)
  2926  	for {
  2927  		if auxIntToInt64(v.AuxInt) != 8 {
  2928  			break
  2929  		}
  2930  		t := auxToType(v.Aux)
  2931  		dst := v_0
  2932  		src := v_1
  2933  		mem := v_2
  2934  		if !(t.Alignment()%8 == 0) {
  2935  			break
  2936  		}
  2937  		v.reset(OpRISCV64MOVDstore)
  2938  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  2939  		v0.AddArg2(src, mem)
  2940  		v.AddArg3(dst, v0, mem)
  2941  		return true
  2942  	}
  2943  	// match: (Move [8] {t} dst src mem)
  2944  	// cond: t.Alignment()%4 == 0
  2945  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  2946  	for {
  2947  		if auxIntToInt64(v.AuxInt) != 8 {
  2948  			break
  2949  		}
  2950  		t := auxToType(v.Aux)
  2951  		dst := v_0
  2952  		src := v_1
  2953  		mem := v_2
  2954  		if !(t.Alignment()%4 == 0) {
  2955  			break
  2956  		}
  2957  		v.reset(OpRISCV64MOVWstore)
  2958  		v.AuxInt = int32ToAuxInt(4)
  2959  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2960  		v0.AuxInt = int32ToAuxInt(4)
  2961  		v0.AddArg2(src, mem)
  2962  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  2963  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2964  		v2.AddArg2(src, mem)
  2965  		v1.AddArg3(dst, v2, mem)
  2966  		v.AddArg3(dst, v0, v1)
  2967  		return true
  2968  	}
  2969  	// match: (Move [8] {t} dst src mem)
  2970  	// cond: t.Alignment()%2 == 0
  2971  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  2972  	for {
  2973  		if auxIntToInt64(v.AuxInt) != 8 {
  2974  			break
  2975  		}
  2976  		t := auxToType(v.Aux)
  2977  		dst := v_0
  2978  		src := v_1
  2979  		mem := v_2
  2980  		if !(t.Alignment()%2 == 0) {
  2981  			break
  2982  		}
  2983  		v.reset(OpRISCV64MOVHstore)
  2984  		v.AuxInt = int32ToAuxInt(6)
  2985  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2986  		v0.AuxInt = int32ToAuxInt(6)
  2987  		v0.AddArg2(src, mem)
  2988  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2989  		v1.AuxInt = int32ToAuxInt(4)
  2990  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2991  		v2.AuxInt = int32ToAuxInt(4)
  2992  		v2.AddArg2(src, mem)
  2993  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2994  		v3.AuxInt = int32ToAuxInt(2)
  2995  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2996  		v4.AuxInt = int32ToAuxInt(2)
  2997  		v4.AddArg2(src, mem)
  2998  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2999  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3000  		v6.AddArg2(src, mem)
  3001  		v5.AddArg3(dst, v6, mem)
  3002  		v3.AddArg3(dst, v4, v5)
  3003  		v1.AddArg3(dst, v2, v3)
  3004  		v.AddArg3(dst, v0, v1)
  3005  		return true
  3006  	}
  3007  	// match: (Move [3] dst src mem)
  3008  	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
  3009  	for {
  3010  		if auxIntToInt64(v.AuxInt) != 3 {
  3011  			break
  3012  		}
  3013  		dst := v_0
  3014  		src := v_1
  3015  		mem := v_2
  3016  		v.reset(OpRISCV64MOVBstore)
  3017  		v.AuxInt = int32ToAuxInt(2)
  3018  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3019  		v0.AuxInt = int32ToAuxInt(2)
  3020  		v0.AddArg2(src, mem)
  3021  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  3022  		v1.AuxInt = int32ToAuxInt(1)
  3023  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3024  		v2.AuxInt = int32ToAuxInt(1)
  3025  		v2.AddArg2(src, mem)
  3026  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  3027  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3028  		v4.AddArg2(src, mem)
  3029  		v3.AddArg3(dst, v4, mem)
  3030  		v1.AddArg3(dst, v2, v3)
  3031  		v.AddArg3(dst, v0, v1)
  3032  		return true
  3033  	}
  3034  	// match: (Move [6] {t} dst src mem)
  3035  	// cond: t.Alignment()%2 == 0
  3036  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  3037  	for {
  3038  		if auxIntToInt64(v.AuxInt) != 6 {
  3039  			break
  3040  		}
  3041  		t := auxToType(v.Aux)
  3042  		dst := v_0
  3043  		src := v_1
  3044  		mem := v_2
  3045  		if !(t.Alignment()%2 == 0) {
  3046  			break
  3047  		}
  3048  		v.reset(OpRISCV64MOVHstore)
  3049  		v.AuxInt = int32ToAuxInt(4)
  3050  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3051  		v0.AuxInt = int32ToAuxInt(4)
  3052  		v0.AddArg2(src, mem)
  3053  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3054  		v1.AuxInt = int32ToAuxInt(2)
  3055  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3056  		v2.AuxInt = int32ToAuxInt(2)
  3057  		v2.AddArg2(src, mem)
  3058  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3059  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3060  		v4.AddArg2(src, mem)
  3061  		v3.AddArg3(dst, v4, mem)
  3062  		v1.AddArg3(dst, v2, v3)
  3063  		v.AddArg3(dst, v0, v1)
  3064  		return true
  3065  	}
  3066  	// match: (Move [12] {t} dst src mem)
  3067  	// cond: t.Alignment()%4 == 0
  3068  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  3069  	for {
  3070  		if auxIntToInt64(v.AuxInt) != 12 {
  3071  			break
  3072  		}
  3073  		t := auxToType(v.Aux)
  3074  		dst := v_0
  3075  		src := v_1
  3076  		mem := v_2
  3077  		if !(t.Alignment()%4 == 0) {
  3078  			break
  3079  		}
  3080  		v.reset(OpRISCV64MOVWstore)
  3081  		v.AuxInt = int32ToAuxInt(8)
  3082  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3083  		v0.AuxInt = int32ToAuxInt(8)
  3084  		v0.AddArg2(src, mem)
  3085  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3086  		v1.AuxInt = int32ToAuxInt(4)
  3087  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3088  		v2.AuxInt = int32ToAuxInt(4)
  3089  		v2.AddArg2(src, mem)
  3090  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3091  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3092  		v4.AddArg2(src, mem)
  3093  		v3.AddArg3(dst, v4, mem)
  3094  		v1.AddArg3(dst, v2, v3)
  3095  		v.AddArg3(dst, v0, v1)
  3096  		return true
  3097  	}
  3098  	// match: (Move [16] {t} dst src mem)
  3099  	// cond: t.Alignment()%8 == 0
  3100  	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
  3101  	for {
  3102  		if auxIntToInt64(v.AuxInt) != 16 {
  3103  			break
  3104  		}
  3105  		t := auxToType(v.Aux)
  3106  		dst := v_0
  3107  		src := v_1
  3108  		mem := v_2
  3109  		if !(t.Alignment()%8 == 0) {
  3110  			break
  3111  		}
  3112  		v.reset(OpRISCV64MOVDstore)
  3113  		v.AuxInt = int32ToAuxInt(8)
  3114  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3115  		v0.AuxInt = int32ToAuxInt(8)
  3116  		v0.AddArg2(src, mem)
  3117  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3118  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3119  		v2.AddArg2(src, mem)
  3120  		v1.AddArg3(dst, v2, mem)
  3121  		v.AddArg3(dst, v0, v1)
  3122  		return true
  3123  	}
  3124  	// match: (Move [24] {t} dst src mem)
  3125  	// cond: t.Alignment()%8 == 0
  3126  	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
  3127  	for {
  3128  		if auxIntToInt64(v.AuxInt) != 24 {
  3129  			break
  3130  		}
  3131  		t := auxToType(v.Aux)
  3132  		dst := v_0
  3133  		src := v_1
  3134  		mem := v_2
  3135  		if !(t.Alignment()%8 == 0) {
  3136  			break
  3137  		}
  3138  		v.reset(OpRISCV64MOVDstore)
  3139  		v.AuxInt = int32ToAuxInt(16)
  3140  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3141  		v0.AuxInt = int32ToAuxInt(16)
  3142  		v0.AddArg2(src, mem)
  3143  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3144  		v1.AuxInt = int32ToAuxInt(8)
  3145  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3146  		v2.AuxInt = int32ToAuxInt(8)
  3147  		v2.AddArg2(src, mem)
  3148  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3149  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3150  		v4.AddArg2(src, mem)
  3151  		v3.AddArg3(dst, v4, mem)
  3152  		v1.AddArg3(dst, v2, v3)
  3153  		v.AddArg3(dst, v0, v1)
  3154  		return true
  3155  	}
  3156  	// match: (Move [32] {t} dst src mem)
  3157  	// cond: t.Alignment()%8 == 0
  3158  	// result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
  3159  	for {
  3160  		if auxIntToInt64(v.AuxInt) != 32 {
  3161  			break
  3162  		}
  3163  		t := auxToType(v.Aux)
  3164  		dst := v_0
  3165  		src := v_1
  3166  		mem := v_2
  3167  		if !(t.Alignment()%8 == 0) {
  3168  			break
  3169  		}
  3170  		v.reset(OpRISCV64MOVDstore)
  3171  		v.AuxInt = int32ToAuxInt(24)
  3172  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3173  		v0.AuxInt = int32ToAuxInt(24)
  3174  		v0.AddArg2(src, mem)
  3175  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3176  		v1.AuxInt = int32ToAuxInt(16)
  3177  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3178  		v2.AuxInt = int32ToAuxInt(16)
  3179  		v2.AddArg2(src, mem)
  3180  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3181  		v3.AuxInt = int32ToAuxInt(8)
  3182  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3183  		v4.AuxInt = int32ToAuxInt(8)
  3184  		v4.AddArg2(src, mem)
  3185  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3186  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3187  		v6.AddArg2(src, mem)
  3188  		v5.AddArg3(dst, v6, mem)
  3189  		v3.AddArg3(dst, v4, v5)
  3190  		v1.AddArg3(dst, v2, v3)
  3191  		v.AddArg3(dst, v0, v1)
  3192  		return true
  3193  	}
  3194  	// match: (Move [s] {t} dst src mem)
  3195  	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
  3196  	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
  3197  	for {
  3198  		s := auxIntToInt64(v.AuxInt)
  3199  		t := auxToType(v.Aux)
  3200  		dst := v_0
  3201  		src := v_1
  3202  		mem := v_2
  3203  		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
  3204  			break
  3205  		}
  3206  		v.reset(OpRISCV64DUFFCOPY)
  3207  		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
  3208  		v.AddArg3(dst, src, mem)
  3209  		return true
  3210  	}
  3211  	// match: (Move [s] {t} dst src mem)
  3212  	// cond: (s <= 16 || logLargeCopy(v, s))
  3213  	// result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
  3214  	for {
  3215  		s := auxIntToInt64(v.AuxInt)
  3216  		t := auxToType(v.Aux)
  3217  		dst := v_0
  3218  		src := v_1
  3219  		mem := v_2
  3220  		if !(s <= 16 || logLargeCopy(v, s)) {
  3221  			break
  3222  		}
  3223  		v.reset(OpRISCV64LoweredMove)
  3224  		v.AuxInt = int64ToAuxInt(t.Alignment())
  3225  		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
  3226  		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  3227  		v0.AddArg(src)
  3228  		v.AddArg4(dst, src, v0, mem)
  3229  		return true
  3230  	}
  3231  	return false
  3232  }
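        // Mul16 and Mul8 (descriptive note): both operands are sign-extended to 32 bits
        // and multiplied with MULW; the low 16 or 8 bits of the product are the correct
        // truncated result regardless of signedness.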
  3233  func rewriteValueRISCV64_OpMul16(v *Value) bool {
  3234  	v_1 := v.Args[1]
  3235  	v_0 := v.Args[0]
  3236  	b := v.Block
  3237  	typ := &b.Func.Config.Types
  3238  	// match: (Mul16 x y)
  3239  	// result: (MULW (SignExt16to32 x) (SignExt16to32 y))
  3240  	for {
  3241  		x := v_0
  3242  		y := v_1
  3243  		v.reset(OpRISCV64MULW)
  3244  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  3245  		v0.AddArg(x)
  3246  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  3247  		v1.AddArg(y)
  3248  		v.AddArg2(v0, v1)
  3249  		return true
  3250  	}
  3251  }
  3252  func rewriteValueRISCV64_OpMul8(v *Value) bool {
  3253  	v_1 := v.Args[1]
  3254  	v_0 := v.Args[0]
  3255  	b := v.Block
  3256  	typ := &b.Func.Config.Types
  3257  	// match: (Mul8 x y)
  3258  	// result: (MULW (SignExt8to32 x) (SignExt8to32 y))
  3259  	for {
  3260  		x := v_0
  3261  		y := v_1
  3262  		v.reset(OpRISCV64MULW)
  3263  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  3264  		v0.AddArg(x)
  3265  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  3266  		v1.AddArg(y)
  3267  		v.AddArg2(v0, v1)
  3268  		return true
  3269  	}
  3270  }
  3271  func rewriteValueRISCV64_OpNeq16(v *Value) bool {
  3272  	v_1 := v.Args[1]
  3273  	v_0 := v.Args[0]
  3274  	b := v.Block
  3275  	typ := &b.Func.Config.Types
  3276  	// match: (Neq16 x y)
  3277  	// result: (Not (Eq16 x y))
  3278  	for {
  3279  		x := v_0
  3280  		y := v_1
  3281  		v.reset(OpNot)
  3282  		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
  3283  		v0.AddArg2(x, y)
  3284  		v.AddArg(v0)
  3285  		return true
  3286  	}
  3287  }
  3288  func rewriteValueRISCV64_OpNeq32(v *Value) bool {
  3289  	v_1 := v.Args[1]
  3290  	v_0 := v.Args[0]
  3291  	b := v.Block
  3292  	typ := &b.Func.Config.Types
  3293  	// match: (Neq32 x y)
  3294  	// result: (Not (Eq32 x y))
  3295  	for {
  3296  		x := v_0
  3297  		y := v_1
  3298  		v.reset(OpNot)
  3299  		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
  3300  		v0.AddArg2(x, y)
  3301  		v.AddArg(v0)
  3302  		return true
  3303  	}
  3304  }
  3305  func rewriteValueRISCV64_OpNeq64(v *Value) bool {
  3306  	v_1 := v.Args[1]
  3307  	v_0 := v.Args[0]
  3308  	b := v.Block
  3309  	typ := &b.Func.Config.Types
  3310  	// match: (Neq64 x y)
  3311  	// result: (Not (Eq64 x y))
  3312  	for {
  3313  		x := v_0
  3314  		y := v_1
  3315  		v.reset(OpNot)
  3316  		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
  3317  		v0.AddArg2(x, y)
  3318  		v.AddArg(v0)
  3319  		return true
  3320  	}
  3321  }
  3322  func rewriteValueRISCV64_OpNeq8(v *Value) bool {
  3323  	v_1 := v.Args[1]
  3324  	v_0 := v.Args[0]
  3325  	b := v.Block
  3326  	typ := &b.Func.Config.Types
  3327  	// match: (Neq8 x y)
  3328  	// result: (Not (Eq8 x y))
  3329  	for {
  3330  		x := v_0
  3331  		y := v_1
  3332  		v.reset(OpNot)
  3333  		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
  3334  		v0.AddArg2(x, y)
  3335  		v.AddArg(v0)
  3336  		return true
  3337  	}
  3338  }
  3339  func rewriteValueRISCV64_OpNeqB(v *Value) bool {
  3340  	v_1 := v.Args[1]
  3341  	v_0 := v.Args[0]
  3342  	b := v.Block
  3343  	typ := &b.Func.Config.Types
  3344  	// match: (NeqB x y)
  3345  	// result: (SNEZ (SUB <typ.Bool> x y))
  3346  	for {
  3347  		x := v_0
  3348  		y := v_1
  3349  		v.reset(OpRISCV64SNEZ)
  3350  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
  3351  		v0.AddArg2(x, y)
  3352  		v.AddArg(v0)
  3353  		return true
  3354  	}
  3355  }
  3356  func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
  3357  	v_1 := v.Args[1]
  3358  	v_0 := v.Args[0]
  3359  	b := v.Block
  3360  	typ := &b.Func.Config.Types
  3361  	// match: (NeqPtr x y)
  3362  	// result: (Not (EqPtr x y))
  3363  	for {
  3364  		x := v_0
  3365  		y := v_1
  3366  		v.reset(OpNot)
  3367  		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
  3368  		v0.AddArg2(x, y)
  3369  		v.AddArg(v0)
  3370  		return true
  3371  	}
  3372  }
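        // OffPtr lowering (descriptive note): offsets from SP that fit in 32 bits fold
        // into a MOVaddr, other 32-bit offsets become an ADDI, and offsets outside the
        // 32-bit range are materialized with a MOVDconst and added with ADD.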
  3373  func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
  3374  	v_0 := v.Args[0]
  3375  	b := v.Block
  3376  	typ := &b.Func.Config.Types
  3377  	// match: (OffPtr [off] ptr:(SP))
  3378  	// cond: is32Bit(off)
  3379  	// result: (MOVaddr [int32(off)] ptr)
  3380  	for {
  3381  		off := auxIntToInt64(v.AuxInt)
  3382  		ptr := v_0
  3383  		if ptr.Op != OpSP || !(is32Bit(off)) {
  3384  			break
  3385  		}
  3386  		v.reset(OpRISCV64MOVaddr)
  3387  		v.AuxInt = int32ToAuxInt(int32(off))
  3388  		v.AddArg(ptr)
  3389  		return true
  3390  	}
  3391  	// match: (OffPtr [off] ptr)
  3392  	// cond: is32Bit(off)
  3393  	// result: (ADDI [off] ptr)
  3394  	for {
  3395  		off := auxIntToInt64(v.AuxInt)
  3396  		ptr := v_0
  3397  		if !(is32Bit(off)) {
  3398  			break
  3399  		}
  3400  		v.reset(OpRISCV64ADDI)
  3401  		v.AuxInt = int64ToAuxInt(off)
  3402  		v.AddArg(ptr)
  3403  		return true
  3404  	}
  3405  	// match: (OffPtr [off] ptr)
  3406  	// result: (ADD (MOVDconst [off]) ptr)
  3407  	for {
  3408  		off := auxIntToInt64(v.AuxInt)
  3409  		ptr := v_0
  3410  		v.reset(OpRISCV64ADD)
  3411  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  3412  		v0.AuxInt = int64ToAuxInt(off)
  3413  		v.AddArg2(v0, ptr)
  3414  		return true
  3415  	}
  3416  }
  3417  func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
  3418  	v_2 := v.Args[2]
  3419  	v_1 := v.Args[1]
  3420  	v_0 := v.Args[0]
  3421  	// match: (PanicBounds [kind] x y mem)
  3422  	// cond: boundsABI(kind) == 0
  3423  	// result: (LoweredPanicBoundsA [kind] x y mem)
  3424  	for {
  3425  		kind := auxIntToInt64(v.AuxInt)
  3426  		x := v_0
  3427  		y := v_1
  3428  		mem := v_2
  3429  		if !(boundsABI(kind) == 0) {
  3430  			break
  3431  		}
  3432  		v.reset(OpRISCV64LoweredPanicBoundsA)
  3433  		v.AuxInt = int64ToAuxInt(kind)
  3434  		v.AddArg3(x, y, mem)
  3435  		return true
  3436  	}
  3437  	// match: (PanicBounds [kind] x y mem)
  3438  	// cond: boundsABI(kind) == 1
  3439  	// result: (LoweredPanicBoundsB [kind] x y mem)
  3440  	for {
  3441  		kind := auxIntToInt64(v.AuxInt)
  3442  		x := v_0
  3443  		y := v_1
  3444  		mem := v_2
  3445  		if !(boundsABI(kind) == 1) {
  3446  			break
  3447  		}
  3448  		v.reset(OpRISCV64LoweredPanicBoundsB)
  3449  		v.AuxInt = int64ToAuxInt(kind)
  3450  		v.AddArg3(x, y, mem)
  3451  		return true
  3452  	}
  3453  	// match: (PanicBounds [kind] x y mem)
  3454  	// cond: boundsABI(kind) == 2
  3455  	// result: (LoweredPanicBoundsC [kind] x y mem)
  3456  	for {
  3457  		kind := auxIntToInt64(v.AuxInt)
  3458  		x := v_0
  3459  		y := v_1
  3460  		mem := v_2
  3461  		if !(boundsABI(kind) == 2) {
  3462  			break
  3463  		}
  3464  		v.reset(OpRISCV64LoweredPanicBoundsC)
  3465  		v.AuxInt = int64ToAuxInt(kind)
  3466  		v.AddArg3(x, y, mem)
  3467  		return true
  3468  	}
  3469  	return false
  3470  }
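        // PopCount16 and PopCount8 (descriptive note): the operand is zero-extended to
        // 64 bits before CPOP; since the upper bits are zero, the full-register
        // population count equals the sub-word one.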
  3471  func rewriteValueRISCV64_OpPopCount16(v *Value) bool {
  3472  	v_0 := v.Args[0]
  3473  	b := v.Block
  3474  	typ := &b.Func.Config.Types
  3475  	// match: (PopCount16 x)
  3476  	// result: (CPOP (ZeroExt16to64 x))
  3477  	for {
  3478  		x := v_0
  3479  		v.reset(OpRISCV64CPOP)
  3480  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  3481  		v0.AddArg(x)
  3482  		v.AddArg(v0)
  3483  		return true
  3484  	}
  3485  }
  3486  func rewriteValueRISCV64_OpPopCount8(v *Value) bool {
  3487  	v_0 := v.Args[0]
  3488  	b := v.Block
  3489  	typ := &b.Func.Config.Types
  3490  	// match: (PopCount8 x)
  3491  	// result: (CPOP (ZeroExt8to64 x))
  3492  	for {
  3493  		x := v_0
  3494  		v.reset(OpRISCV64CPOP)
  3495  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  3496  		v0.AddArg(x)
  3497  		v.AddArg(v0)
  3498  		return true
  3499  	}
  3500  }
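        // ADD strength reduction (descriptive note): a MOVDconst operand whose value
        // fits in 32 bits and whose type is not a pointer folds into an ADDI, and
        // (ADD (SLLI [1|2|3] x) y) becomes SH1ADD/SH2ADD/SH3ADD when
        // buildcfg.GORISCV64 >= 22, since the rva22u64 profile guarantees the Zba
        // shift-and-add instructions.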
  3501  func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
  3502  	v_1 := v.Args[1]
  3503  	v_0 := v.Args[0]
  3504  	// match: (ADD (MOVDconst <t> [val]) x)
  3505  	// cond: is32Bit(val) && !t.IsPtr()
  3506  	// result: (ADDI [val] x)
  3507  	for {
  3508  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3509  			if v_0.Op != OpRISCV64MOVDconst {
  3510  				continue
  3511  			}
  3512  			t := v_0.Type
  3513  			val := auxIntToInt64(v_0.AuxInt)
  3514  			x := v_1
  3515  			if !(is32Bit(val) && !t.IsPtr()) {
  3516  				continue
  3517  			}
  3518  			v.reset(OpRISCV64ADDI)
  3519  			v.AuxInt = int64ToAuxInt(val)
  3520  			v.AddArg(x)
  3521  			return true
  3522  		}
  3523  		break
  3524  	}
  3525  	// match: (ADD (SLLI [1] x) y)
  3526  	// cond: buildcfg.GORISCV64 >= 22
  3527  	// result: (SH1ADD x y)
  3528  	for {
  3529  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3530  			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 1 {
  3531  				continue
  3532  			}
  3533  			x := v_0.Args[0]
  3534  			y := v_1
  3535  			if !(buildcfg.GORISCV64 >= 22) {
  3536  				continue
  3537  			}
  3538  			v.reset(OpRISCV64SH1ADD)
  3539  			v.AddArg2(x, y)
  3540  			return true
  3541  		}
  3542  		break
  3543  	}
  3544  	// match: (ADD (SLLI [2] x) y)
  3545  	// cond: buildcfg.GORISCV64 >= 22
  3546  	// result: (SH2ADD x y)
  3547  	for {
  3548  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3549  			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 2 {
  3550  				continue
  3551  			}
  3552  			x := v_0.Args[0]
  3553  			y := v_1
  3554  			if !(buildcfg.GORISCV64 >= 22) {
  3555  				continue
  3556  			}
  3557  			v.reset(OpRISCV64SH2ADD)
  3558  			v.AddArg2(x, y)
  3559  			return true
  3560  		}
  3561  		break
  3562  	}
  3563  	// match: (ADD (SLLI [3] x) y)
  3564  	// cond: buildcfg.GORISCV64 >= 22
  3565  	// result: (SH3ADD x y)
  3566  	for {
  3567  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3568  			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 3 {
  3569  				continue
  3570  			}
  3571  			x := v_0.Args[0]
  3572  			y := v_1
  3573  			if !(buildcfg.GORISCV64 >= 22) {
  3574  				continue
  3575  			}
  3576  			v.reset(OpRISCV64SH3ADD)
  3577  			v.AddArg2(x, y)
  3578  			return true
  3579  		}
  3580  		break
  3581  	}
  3582  	return false
  3583  }
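        // ADDI folding (descriptive note): an ADDI of a MOVaddr folds into the address
        // offset, an ADDI of a MOVDconst folds into a constant, nested ADDIs combine,
        // and ADDI [0] is dropped entirely; the combining rules are all guarded by an
        // is32Bit check on the summed immediate.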
  3584  func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
  3585  	v_0 := v.Args[0]
  3586  	// match: (ADDI [c] (MOVaddr [d] {s} x))
  3587  	// cond: is32Bit(c+int64(d))
  3588  	// result: (MOVaddr [int32(c)+d] {s} x)
  3589  	for {
  3590  		c := auxIntToInt64(v.AuxInt)
  3591  		if v_0.Op != OpRISCV64MOVaddr {
  3592  			break
  3593  		}
  3594  		d := auxIntToInt32(v_0.AuxInt)
  3595  		s := auxToSym(v_0.Aux)
  3596  		x := v_0.Args[0]
  3597  		if !(is32Bit(c + int64(d))) {
  3598  			break
  3599  		}
  3600  		v.reset(OpRISCV64MOVaddr)
  3601  		v.AuxInt = int32ToAuxInt(int32(c) + d)
  3602  		v.Aux = symToAux(s)
  3603  		v.AddArg(x)
  3604  		return true
  3605  	}
  3606  	// match: (ADDI [0] x)
  3607  	// result: x
  3608  	for {
  3609  		if auxIntToInt64(v.AuxInt) != 0 {
  3610  			break
  3611  		}
  3612  		x := v_0
  3613  		v.copyOf(x)
  3614  		return true
  3615  	}
  3616  	// match: (ADDI [x] (MOVDconst [y]))
  3617  	// cond: is32Bit(x + y)
  3618  	// result: (MOVDconst [x + y])
  3619  	for {
  3620  		x := auxIntToInt64(v.AuxInt)
  3621  		if v_0.Op != OpRISCV64MOVDconst {
  3622  			break
  3623  		}
  3624  		y := auxIntToInt64(v_0.AuxInt)
  3625  		if !(is32Bit(x + y)) {
  3626  			break
  3627  		}
  3628  		v.reset(OpRISCV64MOVDconst)
  3629  		v.AuxInt = int64ToAuxInt(x + y)
  3630  		return true
  3631  	}
  3632  	// match: (ADDI [x] (ADDI [y] z))
  3633  	// cond: is32Bit(x + y)
  3634  	// result: (ADDI [x + y] z)
  3635  	for {
  3636  		x := auxIntToInt64(v.AuxInt)
  3637  		if v_0.Op != OpRISCV64ADDI {
  3638  			break
  3639  		}
  3640  		y := auxIntToInt64(v_0.AuxInt)
  3641  		z := v_0.Args[0]
  3642  		if !(is32Bit(x + y)) {
  3643  			break
  3644  		}
  3645  		v.reset(OpRISCV64ADDI)
  3646  		v.AuxInt = int64ToAuxInt(x + y)
  3647  		v.AddArg(z)
  3648  		return true
  3649  	}
  3650  	return false
  3651  }
  3652  func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
  3653  	v_1 := v.Args[1]
  3654  	v_0 := v.Args[0]
  3655  	// match: (AND (MOVDconst [val]) x)
  3656  	// cond: is32Bit(val)
  3657  	// result: (ANDI [val] x)
  3658  	for {
  3659  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3660  			if v_0.Op != OpRISCV64MOVDconst {
  3661  				continue
  3662  			}
  3663  			val := auxIntToInt64(v_0.AuxInt)
  3664  			x := v_1
  3665  			if !(is32Bit(val)) {
  3666  				continue
  3667  			}
  3668  			v.reset(OpRISCV64ANDI)
  3669  			v.AuxInt = int64ToAuxInt(val)
  3670  			v.AddArg(x)
  3671  			return true
  3672  		}
  3673  		break
  3674  	}
  3675  	return false
  3676  }
  3677  func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
  3678  	v_0 := v.Args[0]
  3679  	// match: (ANDI [0] x)
  3680  	// result: (MOVDconst [0])
  3681  	for {
  3682  		if auxIntToInt64(v.AuxInt) != 0 {
  3683  			break
  3684  		}
  3685  		v.reset(OpRISCV64MOVDconst)
  3686  		v.AuxInt = int64ToAuxInt(0)
  3687  		return true
  3688  	}
  3689  	// match: (ANDI [-1] x)
  3690  	// result: x
  3691  	for {
  3692  		if auxIntToInt64(v.AuxInt) != -1 {
  3693  			break
  3694  		}
  3695  		x := v_0
  3696  		v.copyOf(x)
  3697  		return true
  3698  	}
  3699  	// match: (ANDI [x] (MOVDconst [y]))
  3700  	// result: (MOVDconst [x & y])
  3701  	for {
  3702  		x := auxIntToInt64(v.AuxInt)
  3703  		if v_0.Op != OpRISCV64MOVDconst {
  3704  			break
  3705  		}
  3706  		y := auxIntToInt64(v_0.AuxInt)
  3707  		v.reset(OpRISCV64MOVDconst)
  3708  		v.AuxInt = int64ToAuxInt(x & y)
  3709  		return true
  3710  	}
  3711  	// match: (ANDI [x] (ANDI [y] z))
  3712  	// result: (ANDI [x & y] z)
  3713  	for {
  3714  		x := auxIntToInt64(v.AuxInt)
  3715  		if v_0.Op != OpRISCV64ANDI {
  3716  			break
  3717  		}
  3718  		y := auxIntToInt64(v_0.AuxInt)
  3719  		z := v_0.Args[0]
  3720  		v.reset(OpRISCV64ANDI)
  3721  		v.AuxInt = int64ToAuxInt(x & y)
  3722  		v.AddArg(z)
  3723  		return true
  3724  	}
  3725  	return false
  3726  }
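// Note: the FADDD/FADDS rules below fuse an add whose operand is a multiply
// into a single fused multiply-add. a.Block.Func.useFMA(v) is assumed to
// report whether fusion is permitted at this value (e.g. it can be switched
// off for debugging), so no fusion happens when it returns false.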
  3727  func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
  3728  	v_1 := v.Args[1]
  3729  	v_0 := v.Args[0]
  3730  	// match: (FADDD a (FMULD x y))
  3731  	// cond: a.Block.Func.useFMA(v)
  3732  	// result: (FMADDD x y a)
  3733  	for {
  3734  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3735  			a := v_0
  3736  			if v_1.Op != OpRISCV64FMULD {
  3737  				continue
  3738  			}
  3739  			y := v_1.Args[1]
  3740  			x := v_1.Args[0]
  3741  			if !(a.Block.Func.useFMA(v)) {
  3742  				continue
  3743  			}
  3744  			v.reset(OpRISCV64FMADDD)
  3745  			v.AddArg3(x, y, a)
  3746  			return true
  3747  		}
  3748  		break
  3749  	}
  3750  	return false
  3751  }
  3752  func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
  3753  	v_1 := v.Args[1]
  3754  	v_0 := v.Args[0]
  3755  	// match: (FADDS a (FMULS x y))
  3756  	// cond: a.Block.Func.useFMA(v)
  3757  	// result: (FMADDS x y a)
  3758  	for {
  3759  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3760  			a := v_0
  3761  			if v_1.Op != OpRISCV64FMULS {
  3762  				continue
  3763  			}
  3764  			y := v_1.Args[1]
  3765  			x := v_1.Args[0]
  3766  			if !(a.Block.Func.useFMA(v)) {
  3767  				continue
  3768  			}
  3769  			v.reset(OpRISCV64FMADDS)
  3770  			v.AddArg3(x, y, a)
  3771  			return true
  3772  		}
  3773  		break
  3774  	}
  3775  	return false
  3776  }
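// Note: the FMADD/FMSUB/FNMADD/FNMSUB rules below absorb an FNEG on either
// the multiplicand or the addend by switching to the variant with the
// matching sign, per the RISC-V floating-point semantics:
//
//	fmadd  = (x*y)+z    fmsub  = (x*y)-z
//	fnmsub = -(x*y)+z   fnmadd = -(x*y)-z
//
// The neg.Uses == 1 condition ensures the FNEG has no other users, so it
// can be folded away without duplicating the negation.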
  3777  func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
  3778  	v_2 := v.Args[2]
  3779  	v_1 := v.Args[1]
  3780  	v_0 := v.Args[0]
  3781  	// match: (FMADDD neg:(FNEGD x) y z)
  3782  	// cond: neg.Uses == 1
  3783  	// result: (FNMSUBD x y z)
  3784  	for {
  3785  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3786  			neg := v_0
  3787  			if neg.Op != OpRISCV64FNEGD {
  3788  				continue
  3789  			}
  3790  			x := neg.Args[0]
  3791  			y := v_1
  3792  			z := v_2
  3793  			if !(neg.Uses == 1) {
  3794  				continue
  3795  			}
  3796  			v.reset(OpRISCV64FNMSUBD)
  3797  			v.AddArg3(x, y, z)
  3798  			return true
  3799  		}
  3800  		break
  3801  	}
  3802  	// match: (FMADDD x y neg:(FNEGD z))
  3803  	// cond: neg.Uses == 1
  3804  	// result: (FMSUBD x y z)
  3805  	for {
  3806  		x := v_0
  3807  		y := v_1
  3808  		neg := v_2
  3809  		if neg.Op != OpRISCV64FNEGD {
  3810  			break
  3811  		}
  3812  		z := neg.Args[0]
  3813  		if !(neg.Uses == 1) {
  3814  			break
  3815  		}
  3816  		v.reset(OpRISCV64FMSUBD)
  3817  		v.AddArg3(x, y, z)
  3818  		return true
  3819  	}
  3820  	return false
  3821  }
  3822  func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
  3823  	v_2 := v.Args[2]
  3824  	v_1 := v.Args[1]
  3825  	v_0 := v.Args[0]
  3826  	// match: (FMADDS neg:(FNEGS x) y z)
  3827  	// cond: neg.Uses == 1
  3828  	// result: (FNMSUBS x y z)
  3829  	for {
  3830  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3831  			neg := v_0
  3832  			if neg.Op != OpRISCV64FNEGS {
  3833  				continue
  3834  			}
  3835  			x := neg.Args[0]
  3836  			y := v_1
  3837  			z := v_2
  3838  			if !(neg.Uses == 1) {
  3839  				continue
  3840  			}
  3841  			v.reset(OpRISCV64FNMSUBS)
  3842  			v.AddArg3(x, y, z)
  3843  			return true
  3844  		}
  3845  		break
  3846  	}
  3847  	// match: (FMADDS x y neg:(FNEGS z))
  3848  	// cond: neg.Uses == 1
  3849  	// result: (FMSUBS x y z)
  3850  	for {
  3851  		x := v_0
  3852  		y := v_1
  3853  		neg := v_2
  3854  		if neg.Op != OpRISCV64FNEGS {
  3855  			break
  3856  		}
  3857  		z := neg.Args[0]
  3858  		if !(neg.Uses == 1) {
  3859  			break
  3860  		}
  3861  		v.reset(OpRISCV64FMSUBS)
  3862  		v.AddArg3(x, y, z)
  3863  		return true
  3864  	}
  3865  	return false
  3866  }
  3867  func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
  3868  	v_2 := v.Args[2]
  3869  	v_1 := v.Args[1]
  3870  	v_0 := v.Args[0]
  3871  	// match: (FMSUBD neg:(FNEGD x) y z)
  3872  	// cond: neg.Uses == 1
  3873  	// result: (FNMADDD x y z)
  3874  	for {
  3875  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3876  			neg := v_0
  3877  			if neg.Op != OpRISCV64FNEGD {
  3878  				continue
  3879  			}
  3880  			x := neg.Args[0]
  3881  			y := v_1
  3882  			z := v_2
  3883  			if !(neg.Uses == 1) {
  3884  				continue
  3885  			}
  3886  			v.reset(OpRISCV64FNMADDD)
  3887  			v.AddArg3(x, y, z)
  3888  			return true
  3889  		}
  3890  		break
  3891  	}
  3892  	// match: (FMSUBD x y neg:(FNEGD z))
  3893  	// cond: neg.Uses == 1
  3894  	// result: (FMADDD x y z)
  3895  	for {
  3896  		x := v_0
  3897  		y := v_1
  3898  		neg := v_2
  3899  		if neg.Op != OpRISCV64FNEGD {
  3900  			break
  3901  		}
  3902  		z := neg.Args[0]
  3903  		if !(neg.Uses == 1) {
  3904  			break
  3905  		}
  3906  		v.reset(OpRISCV64FMADDD)
  3907  		v.AddArg3(x, y, z)
  3908  		return true
  3909  	}
  3910  	return false
  3911  }
  3912  func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
  3913  	v_2 := v.Args[2]
  3914  	v_1 := v.Args[1]
  3915  	v_0 := v.Args[0]
  3916  	// match: (FMSUBS neg:(FNEGS x) y z)
  3917  	// cond: neg.Uses == 1
  3918  	// result: (FNMADDS x y z)
  3919  	for {
  3920  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3921  			neg := v_0
  3922  			if neg.Op != OpRISCV64FNEGS {
  3923  				continue
  3924  			}
  3925  			x := neg.Args[0]
  3926  			y := v_1
  3927  			z := v_2
  3928  			if !(neg.Uses == 1) {
  3929  				continue
  3930  			}
  3931  			v.reset(OpRISCV64FNMADDS)
  3932  			v.AddArg3(x, y, z)
  3933  			return true
  3934  		}
  3935  		break
  3936  	}
  3937  	// match: (FMSUBS x y neg:(FNEGS z))
  3938  	// cond: neg.Uses == 1
  3939  	// result: (FMADDS x y z)
  3940  	for {
  3941  		x := v_0
  3942  		y := v_1
  3943  		neg := v_2
  3944  		if neg.Op != OpRISCV64FNEGS {
  3945  			break
  3946  		}
  3947  		z := neg.Args[0]
  3948  		if !(neg.Uses == 1) {
  3949  			break
  3950  		}
  3951  		v.reset(OpRISCV64FMADDS)
  3952  		v.AddArg3(x, y, z)
  3953  		return true
  3954  	}
  3955  	return false
  3956  }
  3957  func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
  3958  	v_2 := v.Args[2]
  3959  	v_1 := v.Args[1]
  3960  	v_0 := v.Args[0]
  3961  	// match: (FNMADDD neg:(FNEGD x) y z)
  3962  	// cond: neg.Uses == 1
  3963  	// result: (FMSUBD x y z)
  3964  	for {
  3965  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3966  			neg := v_0
  3967  			if neg.Op != OpRISCV64FNEGD {
  3968  				continue
  3969  			}
  3970  			x := neg.Args[0]
  3971  			y := v_1
  3972  			z := v_2
  3973  			if !(neg.Uses == 1) {
  3974  				continue
  3975  			}
  3976  			v.reset(OpRISCV64FMSUBD)
  3977  			v.AddArg3(x, y, z)
  3978  			return true
  3979  		}
  3980  		break
  3981  	}
  3982  	// match: (FNMADDD x y neg:(FNEGD z))
  3983  	// cond: neg.Uses == 1
  3984  	// result: (FNMSUBD x y z)
  3985  	for {
  3986  		x := v_0
  3987  		y := v_1
  3988  		neg := v_2
  3989  		if neg.Op != OpRISCV64FNEGD {
  3990  			break
  3991  		}
  3992  		z := neg.Args[0]
  3993  		if !(neg.Uses == 1) {
  3994  			break
  3995  		}
  3996  		v.reset(OpRISCV64FNMSUBD)
  3997  		v.AddArg3(x, y, z)
  3998  		return true
  3999  	}
  4000  	return false
  4001  }
  4002  func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
  4003  	v_2 := v.Args[2]
  4004  	v_1 := v.Args[1]
  4005  	v_0 := v.Args[0]
  4006  	// match: (FNMADDS neg:(FNEGS x) y z)
  4007  	// cond: neg.Uses == 1
  4008  	// result: (FMSUBS x y z)
  4009  	for {
  4010  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4011  			neg := v_0
  4012  			if neg.Op != OpRISCV64FNEGS {
  4013  				continue
  4014  			}
  4015  			x := neg.Args[0]
  4016  			y := v_1
  4017  			z := v_2
  4018  			if !(neg.Uses == 1) {
  4019  				continue
  4020  			}
  4021  			v.reset(OpRISCV64FMSUBS)
  4022  			v.AddArg3(x, y, z)
  4023  			return true
  4024  		}
  4025  		break
  4026  	}
  4027  	// match: (FNMADDS x y neg:(FNEGS z))
  4028  	// cond: neg.Uses == 1
  4029  	// result: (FNMSUBS x y z)
  4030  	for {
  4031  		x := v_0
  4032  		y := v_1
  4033  		neg := v_2
  4034  		if neg.Op != OpRISCV64FNEGS {
  4035  			break
  4036  		}
  4037  		z := neg.Args[0]
  4038  		if !(neg.Uses == 1) {
  4039  			break
  4040  		}
  4041  		v.reset(OpRISCV64FNMSUBS)
  4042  		v.AddArg3(x, y, z)
  4043  		return true
  4044  	}
  4045  	return false
  4046  }
  4047  func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
  4048  	v_2 := v.Args[2]
  4049  	v_1 := v.Args[1]
  4050  	v_0 := v.Args[0]
  4051  	// match: (FNMSUBD neg:(FNEGD x) y z)
  4052  	// cond: neg.Uses == 1
  4053  	// result: (FMADDD x y z)
  4054  	for {
  4055  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4056  			neg := v_0
  4057  			if neg.Op != OpRISCV64FNEGD {
  4058  				continue
  4059  			}
  4060  			x := neg.Args[0]
  4061  			y := v_1
  4062  			z := v_2
  4063  			if !(neg.Uses == 1) {
  4064  				continue
  4065  			}
  4066  			v.reset(OpRISCV64FMADDD)
  4067  			v.AddArg3(x, y, z)
  4068  			return true
  4069  		}
  4070  		break
  4071  	}
  4072  	// match: (FNMSUBD x y neg:(FNEGD z))
  4073  	// cond: neg.Uses == 1
  4074  	// result: (FNMADDD x y z)
  4075  	for {
  4076  		x := v_0
  4077  		y := v_1
  4078  		neg := v_2
  4079  		if neg.Op != OpRISCV64FNEGD {
  4080  			break
  4081  		}
  4082  		z := neg.Args[0]
  4083  		if !(neg.Uses == 1) {
  4084  			break
  4085  		}
  4086  		v.reset(OpRISCV64FNMADDD)
  4087  		v.AddArg3(x, y, z)
  4088  		return true
  4089  	}
  4090  	return false
  4091  }
  4092  func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
  4093  	v_2 := v.Args[2]
  4094  	v_1 := v.Args[1]
  4095  	v_0 := v.Args[0]
  4096  	// match: (FNMSUBS neg:(FNEGS x) y z)
  4097  	// cond: neg.Uses == 1
  4098  	// result: (FMADDS x y z)
  4099  	for {
  4100  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4101  			neg := v_0
  4102  			if neg.Op != OpRISCV64FNEGS {
  4103  				continue
  4104  			}
  4105  			x := neg.Args[0]
  4106  			y := v_1
  4107  			z := v_2
  4108  			if !(neg.Uses == 1) {
  4109  				continue
  4110  			}
  4111  			v.reset(OpRISCV64FMADDS)
  4112  			v.AddArg3(x, y, z)
  4113  			return true
  4114  		}
  4115  		break
  4116  	}
  4117  	// match: (FNMSUBS x y neg:(FNEGS z))
  4118  	// cond: neg.Uses == 1
  4119  	// result: (FNMADDS x y z)
  4120  	for {
  4121  		x := v_0
  4122  		y := v_1
  4123  		neg := v_2
  4124  		if neg.Op != OpRISCV64FNEGS {
  4125  			break
  4126  		}
  4127  		z := neg.Args[0]
  4128  		if !(neg.Uses == 1) {
  4129  			break
  4130  		}
  4131  		v.reset(OpRISCV64FNMADDS)
  4132  		v.AddArg3(x, y, z)
  4133  		return true
  4134  	}
  4135  	return false
  4136  }
  4137  func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
  4138  	v_1 := v.Args[1]
  4139  	v_0 := v.Args[0]
  4140  	// match: (FSUBD a (FMULD x y))
  4141  	// cond: a.Block.Func.useFMA(v)
  4142  	// result: (FNMSUBD x y a)
  4143  	for {
  4144  		a := v_0
  4145  		if v_1.Op != OpRISCV64FMULD {
  4146  			break
  4147  		}
  4148  		y := v_1.Args[1]
  4149  		x := v_1.Args[0]
  4150  		if !(a.Block.Func.useFMA(v)) {
  4151  			break
  4152  		}
  4153  		v.reset(OpRISCV64FNMSUBD)
  4154  		v.AddArg3(x, y, a)
  4155  		return true
  4156  	}
  4157  	// match: (FSUBD (FMULD x y) a)
  4158  	// cond: a.Block.Func.useFMA(v)
  4159  	// result: (FMSUBD x y a)
  4160  	for {
  4161  		if v_0.Op != OpRISCV64FMULD {
  4162  			break
  4163  		}
  4164  		y := v_0.Args[1]
  4165  		x := v_0.Args[0]
  4166  		a := v_1
  4167  		if !(a.Block.Func.useFMA(v)) {
  4168  			break
  4169  		}
  4170  		v.reset(OpRISCV64FMSUBD)
  4171  		v.AddArg3(x, y, a)
  4172  		return true
  4173  	}
  4174  	return false
  4175  }
  4176  func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
  4177  	v_1 := v.Args[1]
  4178  	v_0 := v.Args[0]
  4179  	// match: (FSUBS a (FMULS x y))
  4180  	// cond: a.Block.Func.useFMA(v)
  4181  	// result: (FNMSUBS x y a)
  4182  	for {
  4183  		a := v_0
  4184  		if v_1.Op != OpRISCV64FMULS {
  4185  			break
  4186  		}
  4187  		y := v_1.Args[1]
  4188  		x := v_1.Args[0]
  4189  		if !(a.Block.Func.useFMA(v)) {
  4190  			break
  4191  		}
  4192  		v.reset(OpRISCV64FNMSUBS)
  4193  		v.AddArg3(x, y, a)
  4194  		return true
  4195  	}
  4196  	// match: (FSUBS (FMULS x y) a)
  4197  	// cond: a.Block.Func.useFMA(v)
  4198  	// result: (FMSUBS x y a)
  4199  	for {
  4200  		if v_0.Op != OpRISCV64FMULS {
  4201  			break
  4202  		}
  4203  		y := v_0.Args[1]
  4204  		x := v_0.Args[0]
  4205  		a := v_1
  4206  		if !(a.Block.Func.useFMA(v)) {
  4207  			break
  4208  		}
  4209  		v.reset(OpRISCV64FMSUBS)
  4210  		v.AddArg3(x, y, a)
  4211  		return true
  4212  	}
  4213  	return false
  4214  }
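// Note: from here on, each MOV<size>load/store/storezero op repeats the same
// two address-folding rules: a MOVaddr base is merged into the access (adding
// offsets and merging symbols, provided the sum stays 32-bit and the base is
// not SB while dynamically linking), and an ADDI on the base is folded into
// the access offset under the same 32-bit constraint.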
  4215  func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
  4216  	v_1 := v.Args[1]
  4217  	v_0 := v.Args[0]
  4218  	b := v.Block
  4219  	config := b.Func.Config
  4220  	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4221  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4222  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4223  	for {
  4224  		off1 := auxIntToInt32(v.AuxInt)
  4225  		sym1 := auxToSym(v.Aux)
  4226  		if v_0.Op != OpRISCV64MOVaddr {
  4227  			break
  4228  		}
  4229  		off2 := auxIntToInt32(v_0.AuxInt)
  4230  		sym2 := auxToSym(v_0.Aux)
  4231  		base := v_0.Args[0]
  4232  		mem := v_1
  4233  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4234  			break
  4235  		}
  4236  		v.reset(OpRISCV64MOVBUload)
  4237  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4238  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4239  		v.AddArg2(base, mem)
  4240  		return true
  4241  	}
  4242  	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
  4243  	// cond: is32Bit(int64(off1)+off2)
  4244  	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
  4245  	for {
  4246  		off1 := auxIntToInt32(v.AuxInt)
  4247  		sym := auxToSym(v.Aux)
  4248  		if v_0.Op != OpRISCV64ADDI {
  4249  			break
  4250  		}
  4251  		off2 := auxIntToInt64(v_0.AuxInt)
  4252  		base := v_0.Args[0]
  4253  		mem := v_1
  4254  		if !(is32Bit(int64(off1) + off2)) {
  4255  			break
  4256  		}
  4257  		v.reset(OpRISCV64MOVBUload)
  4258  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4259  		v.Aux = symToAux(sym)
  4260  		v.AddArg2(base, mem)
  4261  		return true
  4262  	}
  4263  	return false
  4264  }
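// Note: MOVBUreg is a zero-extension from 8 bits. The rules below remove it
// when the operand is already an unsigned byte: comparison/set ops (FLES,
// FLTS, FEQS, FNES, FLED, FLTD, FEQD, FNED, SEQZ, SNEZ, SLT, SLTU) only
// produce 0 or 1, a small non-negative ANDI mask already clears the upper
// bits, and constants fold directly. The final rule re-types a single-use
// sign-extending MOVBload as a MOVBUload in the load's own block, with
// clobber marking the original load for removal.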
  4265  func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
  4266  	v_0 := v.Args[0]
  4267  	b := v.Block
  4268  	// match: (MOVBUreg x:(FLES _ _))
  4269  	// result: x
  4270  	for {
  4271  		x := v_0
  4272  		if x.Op != OpRISCV64FLES {
  4273  			break
  4274  		}
  4275  		v.copyOf(x)
  4276  		return true
  4277  	}
  4278  	// match: (MOVBUreg x:(FLTS _ _))
  4279  	// result: x
  4280  	for {
  4281  		x := v_0
  4282  		if x.Op != OpRISCV64FLTS {
  4283  			break
  4284  		}
  4285  		v.copyOf(x)
  4286  		return true
  4287  	}
  4288  	// match: (MOVBUreg x:(FEQS _ _))
  4289  	// result: x
  4290  	for {
  4291  		x := v_0
  4292  		if x.Op != OpRISCV64FEQS {
  4293  			break
  4294  		}
  4295  		v.copyOf(x)
  4296  		return true
  4297  	}
  4298  	// match: (MOVBUreg x:(FNES _ _))
  4299  	// result: x
  4300  	for {
  4301  		x := v_0
  4302  		if x.Op != OpRISCV64FNES {
  4303  			break
  4304  		}
  4305  		v.copyOf(x)
  4306  		return true
  4307  	}
  4308  	// match: (MOVBUreg x:(FLED _ _))
  4309  	// result: x
  4310  	for {
  4311  		x := v_0
  4312  		if x.Op != OpRISCV64FLED {
  4313  			break
  4314  		}
  4315  		v.copyOf(x)
  4316  		return true
  4317  	}
  4318  	// match: (MOVBUreg x:(FLTD _ _))
  4319  	// result: x
  4320  	for {
  4321  		x := v_0
  4322  		if x.Op != OpRISCV64FLTD {
  4323  			break
  4324  		}
  4325  		v.copyOf(x)
  4326  		return true
  4327  	}
  4328  	// match: (MOVBUreg x:(FEQD _ _))
  4329  	// result: x
  4330  	for {
  4331  		x := v_0
  4332  		if x.Op != OpRISCV64FEQD {
  4333  			break
  4334  		}
  4335  		v.copyOf(x)
  4336  		return true
  4337  	}
  4338  	// match: (MOVBUreg x:(FNED _ _))
  4339  	// result: x
  4340  	for {
  4341  		x := v_0
  4342  		if x.Op != OpRISCV64FNED {
  4343  			break
  4344  		}
  4345  		v.copyOf(x)
  4346  		return true
  4347  	}
  4348  	// match: (MOVBUreg x:(SEQZ _))
  4349  	// result: x
  4350  	for {
  4351  		x := v_0
  4352  		if x.Op != OpRISCV64SEQZ {
  4353  			break
  4354  		}
  4355  		v.copyOf(x)
  4356  		return true
  4357  	}
  4358  	// match: (MOVBUreg x:(SNEZ _))
  4359  	// result: x
  4360  	for {
  4361  		x := v_0
  4362  		if x.Op != OpRISCV64SNEZ {
  4363  			break
  4364  		}
  4365  		v.copyOf(x)
  4366  		return true
  4367  	}
  4368  	// match: (MOVBUreg x:(SLT _ _))
  4369  	// result: x
  4370  	for {
  4371  		x := v_0
  4372  		if x.Op != OpRISCV64SLT {
  4373  			break
  4374  		}
  4375  		v.copyOf(x)
  4376  		return true
  4377  	}
  4378  	// match: (MOVBUreg x:(SLTU _ _))
  4379  	// result: x
  4380  	for {
  4381  		x := v_0
  4382  		if x.Op != OpRISCV64SLTU {
  4383  			break
  4384  		}
  4385  		v.copyOf(x)
  4386  		return true
  4387  	}
  4388  	// match: (MOVBUreg x:(ANDI [c] y))
  4389  	// cond: c >= 0 && int64(uint8(c)) == c
  4390  	// result: x
  4391  	for {
  4392  		x := v_0
  4393  		if x.Op != OpRISCV64ANDI {
  4394  			break
  4395  		}
  4396  		c := auxIntToInt64(x.AuxInt)
  4397  		if !(c >= 0 && int64(uint8(c)) == c) {
  4398  			break
  4399  		}
  4400  		v.copyOf(x)
  4401  		return true
  4402  	}
  4403  	// match: (MOVBUreg (ANDI [c] x))
  4404  	// cond: c < 0
  4405  	// result: (ANDI [int64(uint8(c))] x)
  4406  	for {
  4407  		if v_0.Op != OpRISCV64ANDI {
  4408  			break
  4409  		}
  4410  		c := auxIntToInt64(v_0.AuxInt)
  4411  		x := v_0.Args[0]
  4412  		if !(c < 0) {
  4413  			break
  4414  		}
  4415  		v.reset(OpRISCV64ANDI)
  4416  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  4417  		v.AddArg(x)
  4418  		return true
  4419  	}
  4420  	// match: (MOVBUreg (MOVDconst [c]))
  4421  	// result: (MOVDconst [int64(uint8(c))])
  4422  	for {
  4423  		if v_0.Op != OpRISCV64MOVDconst {
  4424  			break
  4425  		}
  4426  		c := auxIntToInt64(v_0.AuxInt)
  4427  		v.reset(OpRISCV64MOVDconst)
  4428  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  4429  		return true
  4430  	}
  4431  	// match: (MOVBUreg x:(MOVBUload _ _))
  4432  	// result: (MOVDreg x)
  4433  	for {
  4434  		x := v_0
  4435  		if x.Op != OpRISCV64MOVBUload {
  4436  			break
  4437  		}
  4438  		v.reset(OpRISCV64MOVDreg)
  4439  		v.AddArg(x)
  4440  		return true
  4441  	}
  4442  	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
  4443  	// result: (MOVDreg x)
  4444  	for {
  4445  		x := v_0
  4446  		if x.Op != OpSelect0 {
  4447  			break
  4448  		}
  4449  		x_0 := x.Args[0]
  4450  		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
  4451  			break
  4452  		}
  4453  		v.reset(OpRISCV64MOVDreg)
  4454  		v.AddArg(x)
  4455  		return true
  4456  	}
  4457  	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
  4458  	// result: (MOVDreg x)
  4459  	for {
  4460  		x := v_0
  4461  		if x.Op != OpSelect0 {
  4462  			break
  4463  		}
  4464  		x_0 := x.Args[0]
  4465  		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
  4466  			break
  4467  		}
  4468  		v.reset(OpRISCV64MOVDreg)
  4469  		v.AddArg(x)
  4470  		return true
  4471  	}
  4472  	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
  4473  	// result: (MOVDreg x)
  4474  	for {
  4475  		x := v_0
  4476  		if x.Op != OpSelect0 {
  4477  			break
  4478  		}
  4479  		x_0 := x.Args[0]
  4480  		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
  4481  			break
  4482  		}
  4483  		v.reset(OpRISCV64MOVDreg)
  4484  		v.AddArg(x)
  4485  		return true
  4486  	}
  4487  	// match: (MOVBUreg x:(MOVBUreg _))
  4488  	// result: (MOVDreg x)
  4489  	for {
  4490  		x := v_0
  4491  		if x.Op != OpRISCV64MOVBUreg {
  4492  			break
  4493  		}
  4494  		v.reset(OpRISCV64MOVDreg)
  4495  		v.AddArg(x)
  4496  		return true
  4497  	}
  4498  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  4499  	// cond: x.Uses == 1 && clobber(x)
  4500  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  4501  	for {
  4502  		t := v.Type
  4503  		x := v_0
  4504  		if x.Op != OpRISCV64MOVBload {
  4505  			break
  4506  		}
  4507  		off := auxIntToInt32(x.AuxInt)
  4508  		sym := auxToSym(x.Aux)
  4509  		mem := x.Args[1]
  4510  		ptr := x.Args[0]
  4511  		if !(x.Uses == 1 && clobber(x)) {
  4512  			break
  4513  		}
  4514  		b = x.Block
  4515  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
  4516  		v.copyOf(v0)
  4517  		v0.AuxInt = int32ToAuxInt(off)
  4518  		v0.Aux = symToAux(sym)
  4519  		v0.AddArg2(ptr, mem)
  4520  		return true
  4521  	}
  4522  	return false
  4523  }
  4524  func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
  4525  	v_1 := v.Args[1]
  4526  	v_0 := v.Args[0]
  4527  	b := v.Block
  4528  	config := b.Func.Config
  4529  	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4530  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4531  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4532  	for {
  4533  		off1 := auxIntToInt32(v.AuxInt)
  4534  		sym1 := auxToSym(v.Aux)
  4535  		if v_0.Op != OpRISCV64MOVaddr {
  4536  			break
  4537  		}
  4538  		off2 := auxIntToInt32(v_0.AuxInt)
  4539  		sym2 := auxToSym(v_0.Aux)
  4540  		base := v_0.Args[0]
  4541  		mem := v_1
  4542  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4543  			break
  4544  		}
  4545  		v.reset(OpRISCV64MOVBload)
  4546  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4547  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4548  		v.AddArg2(base, mem)
  4549  		return true
  4550  	}
  4551  	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
  4552  	// cond: is32Bit(int64(off1)+off2)
  4553  	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
  4554  	for {
  4555  		off1 := auxIntToInt32(v.AuxInt)
  4556  		sym := auxToSym(v.Aux)
  4557  		if v_0.Op != OpRISCV64ADDI {
  4558  			break
  4559  		}
  4560  		off2 := auxIntToInt64(v_0.AuxInt)
  4561  		base := v_0.Args[0]
  4562  		mem := v_1
  4563  		if !(is32Bit(int64(off1) + off2)) {
  4564  			break
  4565  		}
  4566  		v.reset(OpRISCV64MOVBload)
  4567  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4568  		v.Aux = symToAux(sym)
  4569  		v.AddArg2(base, mem)
  4570  		return true
  4571  	}
  4572  	return false
  4573  }
  4574  func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
  4575  	v_0 := v.Args[0]
  4576  	b := v.Block
  4577  	// match: (MOVBreg x:(ANDI [c] y))
  4578  	// cond: c >= 0 && int64(int8(c)) == c
  4579  	// result: x
  4580  	for {
  4581  		x := v_0
  4582  		if x.Op != OpRISCV64ANDI {
  4583  			break
  4584  		}
  4585  		c := auxIntToInt64(x.AuxInt)
  4586  		if !(c >= 0 && int64(int8(c)) == c) {
  4587  			break
  4588  		}
  4589  		v.copyOf(x)
  4590  		return true
  4591  	}
  4592  	// match: (MOVBreg (MOVDconst [c]))
  4593  	// result: (MOVDconst [int64(int8(c))])
  4594  	for {
  4595  		if v_0.Op != OpRISCV64MOVDconst {
  4596  			break
  4597  		}
  4598  		c := auxIntToInt64(v_0.AuxInt)
  4599  		v.reset(OpRISCV64MOVDconst)
  4600  		v.AuxInt = int64ToAuxInt(int64(int8(c)))
  4601  		return true
  4602  	}
  4603  	// match: (MOVBreg x:(MOVBload _ _))
  4604  	// result: (MOVDreg x)
  4605  	for {
  4606  		x := v_0
  4607  		if x.Op != OpRISCV64MOVBload {
  4608  			break
  4609  		}
  4610  		v.reset(OpRISCV64MOVDreg)
  4611  		v.AddArg(x)
  4612  		return true
  4613  	}
  4614  	// match: (MOVBreg x:(MOVBreg _))
  4615  	// result: (MOVDreg x)
  4616  	for {
  4617  		x := v_0
  4618  		if x.Op != OpRISCV64MOVBreg {
  4619  			break
  4620  		}
  4621  		v.reset(OpRISCV64MOVDreg)
  4622  		v.AddArg(x)
  4623  		return true
  4624  	}
  4625  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  4626  	// cond: x.Uses == 1 && clobber(x)
  4627  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  4628  	for {
  4629  		t := v.Type
  4630  		x := v_0
  4631  		if x.Op != OpRISCV64MOVBUload {
  4632  			break
  4633  		}
  4634  		off := auxIntToInt32(x.AuxInt)
  4635  		sym := auxToSym(x.Aux)
  4636  		mem := x.Args[1]
  4637  		ptr := x.Args[0]
  4638  		if !(x.Uses == 1 && clobber(x)) {
  4639  			break
  4640  		}
  4641  		b = x.Block
  4642  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
  4643  		v.copyOf(v0)
  4644  		v0.AuxInt = int32ToAuxInt(off)
  4645  		v0.Aux = symToAux(sym)
  4646  		v0.AddArg2(ptr, mem)
  4647  		return true
  4648  	}
  4649  	return false
  4650  }
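// Note: the narrow store rules below rely on a byte/half/word store writing
// only the low bits of its value, so any MOV*reg sign- or zero-extension on
// the stored value is dropped, and a stored zero constant becomes the
// dedicated storezero form (which is assumed to store from the zero
// register, avoiding a constant materialization).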
  4651  func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
  4652  	v_2 := v.Args[2]
  4653  	v_1 := v.Args[1]
  4654  	v_0 := v.Args[0]
  4655  	b := v.Block
  4656  	config := b.Func.Config
  4657  	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  4658  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4659  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  4660  	for {
  4661  		off1 := auxIntToInt32(v.AuxInt)
  4662  		sym1 := auxToSym(v.Aux)
  4663  		if v_0.Op != OpRISCV64MOVaddr {
  4664  			break
  4665  		}
  4666  		off2 := auxIntToInt32(v_0.AuxInt)
  4667  		sym2 := auxToSym(v_0.Aux)
  4668  		base := v_0.Args[0]
  4669  		val := v_1
  4670  		mem := v_2
  4671  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4672  			break
  4673  		}
  4674  		v.reset(OpRISCV64MOVBstore)
  4675  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4676  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4677  		v.AddArg3(base, val, mem)
  4678  		return true
  4679  	}
  4680  	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
  4681  	// cond: is32Bit(int64(off1)+off2)
  4682  	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
  4683  	for {
  4684  		off1 := auxIntToInt32(v.AuxInt)
  4685  		sym := auxToSym(v.Aux)
  4686  		if v_0.Op != OpRISCV64ADDI {
  4687  			break
  4688  		}
  4689  		off2 := auxIntToInt64(v_0.AuxInt)
  4690  		base := v_0.Args[0]
  4691  		val := v_1
  4692  		mem := v_2
  4693  		if !(is32Bit(int64(off1) + off2)) {
  4694  			break
  4695  		}
  4696  		v.reset(OpRISCV64MOVBstore)
  4697  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4698  		v.Aux = symToAux(sym)
  4699  		v.AddArg3(base, val, mem)
  4700  		return true
  4701  	}
  4702  	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
  4703  	// result: (MOVBstorezero [off] {sym} ptr mem)
  4704  	for {
  4705  		off := auxIntToInt32(v.AuxInt)
  4706  		sym := auxToSym(v.Aux)
  4707  		ptr := v_0
  4708  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  4709  			break
  4710  		}
  4711  		mem := v_2
  4712  		v.reset(OpRISCV64MOVBstorezero)
  4713  		v.AuxInt = int32ToAuxInt(off)
  4714  		v.Aux = symToAux(sym)
  4715  		v.AddArg2(ptr, mem)
  4716  		return true
  4717  	}
  4718  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  4719  	// result: (MOVBstore [off] {sym} ptr x mem)
  4720  	for {
  4721  		off := auxIntToInt32(v.AuxInt)
  4722  		sym := auxToSym(v.Aux)
  4723  		ptr := v_0
  4724  		if v_1.Op != OpRISCV64MOVBreg {
  4725  			break
  4726  		}
  4727  		x := v_1.Args[0]
  4728  		mem := v_2
  4729  		v.reset(OpRISCV64MOVBstore)
  4730  		v.AuxInt = int32ToAuxInt(off)
  4731  		v.Aux = symToAux(sym)
  4732  		v.AddArg3(ptr, x, mem)
  4733  		return true
  4734  	}
  4735  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  4736  	// result: (MOVBstore [off] {sym} ptr x mem)
  4737  	for {
  4738  		off := auxIntToInt32(v.AuxInt)
  4739  		sym := auxToSym(v.Aux)
  4740  		ptr := v_0
  4741  		if v_1.Op != OpRISCV64MOVHreg {
  4742  			break
  4743  		}
  4744  		x := v_1.Args[0]
  4745  		mem := v_2
  4746  		v.reset(OpRISCV64MOVBstore)
  4747  		v.AuxInt = int32ToAuxInt(off)
  4748  		v.Aux = symToAux(sym)
  4749  		v.AddArg3(ptr, x, mem)
  4750  		return true
  4751  	}
  4752  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  4753  	// result: (MOVBstore [off] {sym} ptr x mem)
  4754  	for {
  4755  		off := auxIntToInt32(v.AuxInt)
  4756  		sym := auxToSym(v.Aux)
  4757  		ptr := v_0
  4758  		if v_1.Op != OpRISCV64MOVWreg {
  4759  			break
  4760  		}
  4761  		x := v_1.Args[0]
  4762  		mem := v_2
  4763  		v.reset(OpRISCV64MOVBstore)
  4764  		v.AuxInt = int32ToAuxInt(off)
  4765  		v.Aux = symToAux(sym)
  4766  		v.AddArg3(ptr, x, mem)
  4767  		return true
  4768  	}
  4769  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  4770  	// result: (MOVBstore [off] {sym} ptr x mem)
  4771  	for {
  4772  		off := auxIntToInt32(v.AuxInt)
  4773  		sym := auxToSym(v.Aux)
  4774  		ptr := v_0
  4775  		if v_1.Op != OpRISCV64MOVBUreg {
  4776  			break
  4777  		}
  4778  		x := v_1.Args[0]
  4779  		mem := v_2
  4780  		v.reset(OpRISCV64MOVBstore)
  4781  		v.AuxInt = int32ToAuxInt(off)
  4782  		v.Aux = symToAux(sym)
  4783  		v.AddArg3(ptr, x, mem)
  4784  		return true
  4785  	}
  4786  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  4787  	// result: (MOVBstore [off] {sym} ptr x mem)
  4788  	for {
  4789  		off := auxIntToInt32(v.AuxInt)
  4790  		sym := auxToSym(v.Aux)
  4791  		ptr := v_0
  4792  		if v_1.Op != OpRISCV64MOVHUreg {
  4793  			break
  4794  		}
  4795  		x := v_1.Args[0]
  4796  		mem := v_2
  4797  		v.reset(OpRISCV64MOVBstore)
  4798  		v.AuxInt = int32ToAuxInt(off)
  4799  		v.Aux = symToAux(sym)
  4800  		v.AddArg3(ptr, x, mem)
  4801  		return true
  4802  	}
  4803  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  4804  	// result: (MOVBstore [off] {sym} ptr x mem)
  4805  	for {
  4806  		off := auxIntToInt32(v.AuxInt)
  4807  		sym := auxToSym(v.Aux)
  4808  		ptr := v_0
  4809  		if v_1.Op != OpRISCV64MOVWUreg {
  4810  			break
  4811  		}
  4812  		x := v_1.Args[0]
  4813  		mem := v_2
  4814  		v.reset(OpRISCV64MOVBstore)
  4815  		v.AuxInt = int32ToAuxInt(off)
  4816  		v.Aux = symToAux(sym)
  4817  		v.AddArg3(ptr, x, mem)
  4818  		return true
  4819  	}
  4820  	return false
  4821  }
  4822  func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
  4823  	v_1 := v.Args[1]
  4824  	v_0 := v.Args[0]
  4825  	b := v.Block
  4826  	config := b.Func.Config
  4827  	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4828  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4829  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4830  	for {
  4831  		off1 := auxIntToInt32(v.AuxInt)
  4832  		sym1 := auxToSym(v.Aux)
  4833  		if v_0.Op != OpRISCV64MOVaddr {
  4834  			break
  4835  		}
  4836  		off2 := auxIntToInt32(v_0.AuxInt)
  4837  		sym2 := auxToSym(v_0.Aux)
  4838  		base := v_0.Args[0]
  4839  		mem := v_1
  4840  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4841  			break
  4842  		}
  4843  		v.reset(OpRISCV64MOVBstorezero)
  4844  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4845  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4846  		v.AddArg2(base, mem)
  4847  		return true
  4848  	}
  4849  	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] base) mem)
  4850  	// cond: is32Bit(int64(off1)+off2)
  4851  	// result: (MOVBstorezero [off1+int32(off2)] {sym} base mem)
  4852  	for {
  4853  		off1 := auxIntToInt32(v.AuxInt)
  4854  		sym := auxToSym(v.Aux)
  4855  		if v_0.Op != OpRISCV64ADDI {
  4856  			break
  4857  		}
  4858  		off2 := auxIntToInt64(v_0.AuxInt)
  4859  		base := v_0.Args[0]
  4860  		mem := v_1
  4861  		if !(is32Bit(int64(off1) + off2)) {
  4862  			break
  4863  		}
  4864  		v.reset(OpRISCV64MOVBstorezero)
  4865  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4866  		v.Aux = symToAux(sym)
  4867  		v.AddArg2(base, mem)
  4868  		return true
  4869  	}
  4870  	return false
  4871  }
  4872  func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
  4873  	v_1 := v.Args[1]
  4874  	v_0 := v.Args[0]
  4875  	b := v.Block
  4876  	config := b.Func.Config
  4877  	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4878  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4879  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4880  	for {
  4881  		off1 := auxIntToInt32(v.AuxInt)
  4882  		sym1 := auxToSym(v.Aux)
  4883  		if v_0.Op != OpRISCV64MOVaddr {
  4884  			break
  4885  		}
  4886  		off2 := auxIntToInt32(v_0.AuxInt)
  4887  		sym2 := auxToSym(v_0.Aux)
  4888  		base := v_0.Args[0]
  4889  		mem := v_1
  4890  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4891  			break
  4892  		}
  4893  		v.reset(OpRISCV64MOVDload)
  4894  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4895  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4896  		v.AddArg2(base, mem)
  4897  		return true
  4898  	}
  4899  	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
  4900  	// cond: is32Bit(int64(off1)+off2)
  4901  	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
  4902  	for {
  4903  		off1 := auxIntToInt32(v.AuxInt)
  4904  		sym := auxToSym(v.Aux)
  4905  		if v_0.Op != OpRISCV64ADDI {
  4906  			break
  4907  		}
  4908  		off2 := auxIntToInt64(v_0.AuxInt)
  4909  		base := v_0.Args[0]
  4910  		mem := v_1
  4911  		if !(is32Bit(int64(off1) + off2)) {
  4912  			break
  4913  		}
  4914  		v.reset(OpRISCV64MOVDload)
  4915  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4916  		v.Aux = symToAux(sym)
  4917  		v.AddArg2(base, mem)
  4918  		return true
  4919  	}
  4920  	return false
  4921  }
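// Note: MOVDreg and MOVDnop are register-to-register moves introduced by the
// extension rules above; a MOVDreg whose operand is only used here becomes a
// MOVDnop, and a MOVDnop of a constant folds back to the constant. A MOVDnop
// is assumed to emit no machine code, so these moves normally vanish.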
  4922  func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
  4923  	v_0 := v.Args[0]
  4924  	// match: (MOVDnop (MOVDconst [c]))
  4925  	// result: (MOVDconst [c])
  4926  	for {
  4927  		if v_0.Op != OpRISCV64MOVDconst {
  4928  			break
  4929  		}
  4930  		c := auxIntToInt64(v_0.AuxInt)
  4931  		v.reset(OpRISCV64MOVDconst)
  4932  		v.AuxInt = int64ToAuxInt(c)
  4933  		return true
  4934  	}
  4935  	return false
  4936  }
  4937  func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
  4938  	v_0 := v.Args[0]
  4939  	// match: (MOVDreg x)
  4940  	// cond: x.Uses == 1
  4941  	// result: (MOVDnop x)
  4942  	for {
  4943  		x := v_0
  4944  		if !(x.Uses == 1) {
  4945  			break
  4946  		}
  4947  		v.reset(OpRISCV64MOVDnop)
  4948  		v.AddArg(x)
  4949  		return true
  4950  	}
  4951  	return false
  4952  }
  4953  func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
  4954  	v_2 := v.Args[2]
  4955  	v_1 := v.Args[1]
  4956  	v_0 := v.Args[0]
  4957  	b := v.Block
  4958  	config := b.Func.Config
  4959  	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  4960  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4961  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  4962  	for {
  4963  		off1 := auxIntToInt32(v.AuxInt)
  4964  		sym1 := auxToSym(v.Aux)
  4965  		if v_0.Op != OpRISCV64MOVaddr {
  4966  			break
  4967  		}
  4968  		off2 := auxIntToInt32(v_0.AuxInt)
  4969  		sym2 := auxToSym(v_0.Aux)
  4970  		base := v_0.Args[0]
  4971  		val := v_1
  4972  		mem := v_2
  4973  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4974  			break
  4975  		}
  4976  		v.reset(OpRISCV64MOVDstore)
  4977  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4978  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4979  		v.AddArg3(base, val, mem)
  4980  		return true
  4981  	}
  4982  	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
  4983  	// cond: is32Bit(int64(off1)+off2)
  4984  	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
  4985  	for {
  4986  		off1 := auxIntToInt32(v.AuxInt)
  4987  		sym := auxToSym(v.Aux)
  4988  		if v_0.Op != OpRISCV64ADDI {
  4989  			break
  4990  		}
  4991  		off2 := auxIntToInt64(v_0.AuxInt)
  4992  		base := v_0.Args[0]
  4993  		val := v_1
  4994  		mem := v_2
  4995  		if !(is32Bit(int64(off1) + off2)) {
  4996  			break
  4997  		}
  4998  		v.reset(OpRISCV64MOVDstore)
  4999  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5000  		v.Aux = symToAux(sym)
  5001  		v.AddArg3(base, val, mem)
  5002  		return true
  5003  	}
  5004  	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
  5005  	// result: (MOVDstorezero [off] {sym} ptr mem)
  5006  	for {
  5007  		off := auxIntToInt32(v.AuxInt)
  5008  		sym := auxToSym(v.Aux)
  5009  		ptr := v_0
  5010  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5011  			break
  5012  		}
  5013  		mem := v_2
  5014  		v.reset(OpRISCV64MOVDstorezero)
  5015  		v.AuxInt = int32ToAuxInt(off)
  5016  		v.Aux = symToAux(sym)
  5017  		v.AddArg2(ptr, mem)
  5018  		return true
  5019  	}
  5020  	return false
  5021  }
  5022  func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
  5023  	v_1 := v.Args[1]
  5024  	v_0 := v.Args[0]
  5025  	b := v.Block
  5026  	config := b.Func.Config
  5027  	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5028  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5029  	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5030  	for {
  5031  		off1 := auxIntToInt32(v.AuxInt)
  5032  		sym1 := auxToSym(v.Aux)
  5033  		if v_0.Op != OpRISCV64MOVaddr {
  5034  			break
  5035  		}
  5036  		off2 := auxIntToInt32(v_0.AuxInt)
  5037  		sym2 := auxToSym(v_0.Aux)
  5038  		base := v_0.Args[0]
  5039  		mem := v_1
  5040  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5041  			break
  5042  		}
  5043  		v.reset(OpRISCV64MOVDstorezero)
  5044  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5045  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5046  		v.AddArg2(base, mem)
  5047  		return true
  5048  	}
  5049  	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] base) mem)
  5050  	// cond: is32Bit(int64(off1)+off2)
  5051  	// result: (MOVDstorezero [off1+int32(off2)] {sym} base mem)
  5052  	for {
  5053  		off1 := auxIntToInt32(v.AuxInt)
  5054  		sym := auxToSym(v.Aux)
  5055  		if v_0.Op != OpRISCV64ADDI {
  5056  			break
  5057  		}
  5058  		off2 := auxIntToInt64(v_0.AuxInt)
  5059  		base := v_0.Args[0]
  5060  		mem := v_1
  5061  		if !(is32Bit(int64(off1) + off2)) {
  5062  			break
  5063  		}
  5064  		v.reset(OpRISCV64MOVDstorezero)
  5065  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5066  		v.Aux = symToAux(sym)
  5067  		v.AddArg2(base, mem)
  5068  		return true
  5069  	}
  5070  	return false
  5071  }
  5072  func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
  5073  	v_1 := v.Args[1]
  5074  	v_0 := v.Args[0]
  5075  	b := v.Block
  5076  	config := b.Func.Config
  5077  	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5078  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5079  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5080  	for {
  5081  		off1 := auxIntToInt32(v.AuxInt)
  5082  		sym1 := auxToSym(v.Aux)
  5083  		if v_0.Op != OpRISCV64MOVaddr {
  5084  			break
  5085  		}
  5086  		off2 := auxIntToInt32(v_0.AuxInt)
  5087  		sym2 := auxToSym(v_0.Aux)
  5088  		base := v_0.Args[0]
  5089  		mem := v_1
  5090  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5091  			break
  5092  		}
  5093  		v.reset(OpRISCV64MOVHUload)
  5094  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5095  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5096  		v.AddArg2(base, mem)
  5097  		return true
  5098  	}
  5099  	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
  5100  	// cond: is32Bit(int64(off1)+off2)
  5101  	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
  5102  	for {
  5103  		off1 := auxIntToInt32(v.AuxInt)
  5104  		sym := auxToSym(v.Aux)
  5105  		if v_0.Op != OpRISCV64ADDI {
  5106  			break
  5107  		}
  5108  		off2 := auxIntToInt64(v_0.AuxInt)
  5109  		base := v_0.Args[0]
  5110  		mem := v_1
  5111  		if !(is32Bit(int64(off1) + off2)) {
  5112  			break
  5113  		}
  5114  		v.reset(OpRISCV64MOVHUload)
  5115  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5116  		v.Aux = symToAux(sym)
  5117  		v.AddArg2(base, mem)
  5118  		return true
  5119  	}
  5120  	return false
  5121  }
  5122  func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
  5123  	v_0 := v.Args[0]
  5124  	b := v.Block
  5125  	// match: (MOVHUreg x:(ANDI [c] y))
  5126  	// cond: c >= 0 && int64(uint16(c)) == c
  5127  	// result: x
  5128  	for {
  5129  		x := v_0
  5130  		if x.Op != OpRISCV64ANDI {
  5131  			break
  5132  		}
  5133  		c := auxIntToInt64(x.AuxInt)
  5134  		if !(c >= 0 && int64(uint16(c)) == c) {
  5135  			break
  5136  		}
  5137  		v.copyOf(x)
  5138  		return true
  5139  	}
  5140  	// match: (MOVHUreg (ANDI [c] x))
  5141  	// cond: c < 0
  5142  	// result: (ANDI [int64(uint16(c))] x)
  5143  	for {
  5144  		if v_0.Op != OpRISCV64ANDI {
  5145  			break
  5146  		}
  5147  		c := auxIntToInt64(v_0.AuxInt)
  5148  		x := v_0.Args[0]
  5149  		if !(c < 0) {
  5150  			break
  5151  		}
  5152  		v.reset(OpRISCV64ANDI)
  5153  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  5154  		v.AddArg(x)
  5155  		return true
  5156  	}
  5157  	// match: (MOVHUreg (MOVDconst [c]))
  5158  	// result: (MOVDconst [int64(uint16(c))])
  5159  	for {
  5160  		if v_0.Op != OpRISCV64MOVDconst {
  5161  			break
  5162  		}
  5163  		c := auxIntToInt64(v_0.AuxInt)
  5164  		v.reset(OpRISCV64MOVDconst)
  5165  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  5166  		return true
  5167  	}
  5168  	// match: (MOVHUreg x:(MOVBUload _ _))
  5169  	// result: (MOVDreg x)
  5170  	for {
  5171  		x := v_0
  5172  		if x.Op != OpRISCV64MOVBUload {
  5173  			break
  5174  		}
  5175  		v.reset(OpRISCV64MOVDreg)
  5176  		v.AddArg(x)
  5177  		return true
  5178  	}
  5179  	// match: (MOVHUreg x:(MOVHUload _ _))
  5180  	// result: (MOVDreg x)
  5181  	for {
  5182  		x := v_0
  5183  		if x.Op != OpRISCV64MOVHUload {
  5184  			break
  5185  		}
  5186  		v.reset(OpRISCV64MOVDreg)
  5187  		v.AddArg(x)
  5188  		return true
  5189  	}
  5190  	// match: (MOVHUreg x:(MOVBUreg _))
  5191  	// result: (MOVDreg x)
  5192  	for {
  5193  		x := v_0
  5194  		if x.Op != OpRISCV64MOVBUreg {
  5195  			break
  5196  		}
  5197  		v.reset(OpRISCV64MOVDreg)
  5198  		v.AddArg(x)
  5199  		return true
  5200  	}
  5201  	// match: (MOVHUreg x:(MOVHUreg _))
  5202  	// result: (MOVDreg x)
  5203  	for {
  5204  		x := v_0
  5205  		if x.Op != OpRISCV64MOVHUreg {
  5206  			break
  5207  		}
  5208  		v.reset(OpRISCV64MOVDreg)
  5209  		v.AddArg(x)
  5210  		return true
  5211  	}
  5212  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  5213  	// cond: x.Uses == 1 && clobber(x)
  5214  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  5215  	for {
  5216  		t := v.Type
  5217  		x := v_0
  5218  		if x.Op != OpRISCV64MOVHload {
  5219  			break
  5220  		}
  5221  		off := auxIntToInt32(x.AuxInt)
  5222  		sym := auxToSym(x.Aux)
  5223  		mem := x.Args[1]
  5224  		ptr := x.Args[0]
  5225  		if !(x.Uses == 1 && clobber(x)) {
  5226  			break
  5227  		}
  5228  		b = x.Block
  5229  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
  5230  		v.copyOf(v0)
  5231  		v0.AuxInt = int32ToAuxInt(off)
  5232  		v0.Aux = symToAux(sym)
  5233  		v0.AddArg2(ptr, mem)
  5234  		return true
  5235  	}
  5236  	return false
  5237  }
  5238  func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
  5239  	v_1 := v.Args[1]
  5240  	v_0 := v.Args[0]
  5241  	b := v.Block
  5242  	config := b.Func.Config
  5243  	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5244  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5245  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5246  	for {
  5247  		off1 := auxIntToInt32(v.AuxInt)
  5248  		sym1 := auxToSym(v.Aux)
  5249  		if v_0.Op != OpRISCV64MOVaddr {
  5250  			break
  5251  		}
  5252  		off2 := auxIntToInt32(v_0.AuxInt)
  5253  		sym2 := auxToSym(v_0.Aux)
  5254  		base := v_0.Args[0]
  5255  		mem := v_1
  5256  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5257  			break
  5258  		}
  5259  		v.reset(OpRISCV64MOVHload)
  5260  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5261  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5262  		v.AddArg2(base, mem)
  5263  		return true
  5264  	}
  5265  	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
  5266  	// cond: is32Bit(int64(off1)+off2)
  5267  	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
  5268  	for {
  5269  		off1 := auxIntToInt32(v.AuxInt)
  5270  		sym := auxToSym(v.Aux)
  5271  		if v_0.Op != OpRISCV64ADDI {
  5272  			break
  5273  		}
  5274  		off2 := auxIntToInt64(v_0.AuxInt)
  5275  		base := v_0.Args[0]
  5276  		mem := v_1
  5277  		if !(is32Bit(int64(off1) + off2)) {
  5278  			break
  5279  		}
  5280  		v.reset(OpRISCV64MOVHload)
  5281  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5282  		v.Aux = symToAux(sym)
  5283  		v.AddArg2(base, mem)
  5284  		return true
  5285  	}
  5286  	return false
  5287  }
  5288  func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
  5289  	v_0 := v.Args[0]
  5290  	b := v.Block
  5291  	// match: (MOVHreg x:(ANDI [c] y))
  5292  	// cond: c >= 0 && int64(int16(c)) == c
  5293  	// result: x
  5294  	for {
  5295  		x := v_0
  5296  		if x.Op != OpRISCV64ANDI {
  5297  			break
  5298  		}
  5299  		c := auxIntToInt64(x.AuxInt)
  5300  		if !(c >= 0 && int64(int16(c)) == c) {
  5301  			break
  5302  		}
  5303  		v.copyOf(x)
  5304  		return true
  5305  	}
  5306  	// match: (MOVHreg (MOVDconst [c]))
  5307  	// result: (MOVDconst [int64(int16(c))])
  5308  	for {
  5309  		if v_0.Op != OpRISCV64MOVDconst {
  5310  			break
  5311  		}
  5312  		c := auxIntToInt64(v_0.AuxInt)
  5313  		v.reset(OpRISCV64MOVDconst)
  5314  		v.AuxInt = int64ToAuxInt(int64(int16(c)))
  5315  		return true
  5316  	}
  5317  	// match: (MOVHreg x:(MOVBload _ _))
  5318  	// result: (MOVDreg x)
  5319  	for {
  5320  		x := v_0
  5321  		if x.Op != OpRISCV64MOVBload {
  5322  			break
  5323  		}
  5324  		v.reset(OpRISCV64MOVDreg)
  5325  		v.AddArg(x)
  5326  		return true
  5327  	}
  5328  	// match: (MOVHreg x:(MOVBUload _ _))
  5329  	// result: (MOVDreg x)
  5330  	for {
  5331  		x := v_0
  5332  		if x.Op != OpRISCV64MOVBUload {
  5333  			break
  5334  		}
  5335  		v.reset(OpRISCV64MOVDreg)
  5336  		v.AddArg(x)
  5337  		return true
  5338  	}
  5339  	// match: (MOVHreg x:(MOVHload _ _))
  5340  	// result: (MOVDreg x)
  5341  	for {
  5342  		x := v_0
  5343  		if x.Op != OpRISCV64MOVHload {
  5344  			break
  5345  		}
  5346  		v.reset(OpRISCV64MOVDreg)
  5347  		v.AddArg(x)
  5348  		return true
  5349  	}
  5350  	// match: (MOVHreg x:(MOVBreg _))
  5351  	// result: (MOVDreg x)
  5352  	for {
  5353  		x := v_0
  5354  		if x.Op != OpRISCV64MOVBreg {
  5355  			break
  5356  		}
  5357  		v.reset(OpRISCV64MOVDreg)
  5358  		v.AddArg(x)
  5359  		return true
  5360  	}
  5361  	// match: (MOVHreg x:(MOVBUreg _))
  5362  	// result: (MOVDreg x)
  5363  	for {
  5364  		x := v_0
  5365  		if x.Op != OpRISCV64MOVBUreg {
  5366  			break
  5367  		}
  5368  		v.reset(OpRISCV64MOVDreg)
  5369  		v.AddArg(x)
  5370  		return true
  5371  	}
  5372  	// match: (MOVHreg x:(MOVHreg _))
  5373  	// result: (MOVDreg x)
  5374  	for {
  5375  		x := v_0
  5376  		if x.Op != OpRISCV64MOVHreg {
  5377  			break
  5378  		}
  5379  		v.reset(OpRISCV64MOVDreg)
  5380  		v.AddArg(x)
  5381  		return true
  5382  	}
  5383  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  5384  	// cond: x.Uses == 1 && clobber(x)
  5385  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  5386  	for {
  5387  		t := v.Type
  5388  		x := v_0
  5389  		if x.Op != OpRISCV64MOVHUload {
  5390  			break
  5391  		}
  5392  		off := auxIntToInt32(x.AuxInt)
  5393  		sym := auxToSym(x.Aux)
  5394  		mem := x.Args[1]
  5395  		ptr := x.Args[0]
  5396  		if !(x.Uses == 1 && clobber(x)) {
  5397  			break
  5398  		}
  5399  		b = x.Block
  5400  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
  5401  		v.copyOf(v0)
  5402  		v0.AuxInt = int32ToAuxInt(off)
  5403  		v0.Aux = symToAux(sym)
  5404  		v0.AddArg2(ptr, mem)
  5405  		return true
  5406  	}
  5407  	return false
  5408  }
  5409  func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
  5410  	v_2 := v.Args[2]
  5411  	v_1 := v.Args[1]
  5412  	v_0 := v.Args[0]
  5413  	b := v.Block
  5414  	config := b.Func.Config
  5415  	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  5416  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5417  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  5418  	for {
  5419  		off1 := auxIntToInt32(v.AuxInt)
  5420  		sym1 := auxToSym(v.Aux)
  5421  		if v_0.Op != OpRISCV64MOVaddr {
  5422  			break
  5423  		}
  5424  		off2 := auxIntToInt32(v_0.AuxInt)
  5425  		sym2 := auxToSym(v_0.Aux)
  5426  		base := v_0.Args[0]
  5427  		val := v_1
  5428  		mem := v_2
  5429  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5430  			break
  5431  		}
  5432  		v.reset(OpRISCV64MOVHstore)
  5433  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5434  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5435  		v.AddArg3(base, val, mem)
  5436  		return true
  5437  	}
  5438  	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
  5439  	// cond: is32Bit(int64(off1)+off2)
  5440  	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
  5441  	for {
  5442  		off1 := auxIntToInt32(v.AuxInt)
  5443  		sym := auxToSym(v.Aux)
  5444  		if v_0.Op != OpRISCV64ADDI {
  5445  			break
  5446  		}
  5447  		off2 := auxIntToInt64(v_0.AuxInt)
  5448  		base := v_0.Args[0]
  5449  		val := v_1
  5450  		mem := v_2
  5451  		if !(is32Bit(int64(off1) + off2)) {
  5452  			break
  5453  		}
  5454  		v.reset(OpRISCV64MOVHstore)
  5455  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5456  		v.Aux = symToAux(sym)
  5457  		v.AddArg3(base, val, mem)
  5458  		return true
  5459  	}
  5460  	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
  5461  	// result: (MOVHstorezero [off] {sym} ptr mem)
  5462  	for {
  5463  		off := auxIntToInt32(v.AuxInt)
  5464  		sym := auxToSym(v.Aux)
  5465  		ptr := v_0
  5466  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5467  			break
  5468  		}
  5469  		mem := v_2
  5470  		v.reset(OpRISCV64MOVHstorezero)
  5471  		v.AuxInt = int32ToAuxInt(off)
  5472  		v.Aux = symToAux(sym)
  5473  		v.AddArg2(ptr, mem)
  5474  		return true
  5475  	}
  5476  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  5477  	// result: (MOVHstore [off] {sym} ptr x mem)
  5478  	for {
  5479  		off := auxIntToInt32(v.AuxInt)
  5480  		sym := auxToSym(v.Aux)
  5481  		ptr := v_0
  5482  		if v_1.Op != OpRISCV64MOVHreg {
  5483  			break
  5484  		}
  5485  		x := v_1.Args[0]
  5486  		mem := v_2
  5487  		v.reset(OpRISCV64MOVHstore)
  5488  		v.AuxInt = int32ToAuxInt(off)
  5489  		v.Aux = symToAux(sym)
  5490  		v.AddArg3(ptr, x, mem)
  5491  		return true
  5492  	}
  5493  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  5494  	// result: (MOVHstore [off] {sym} ptr x mem)
  5495  	for {
  5496  		off := auxIntToInt32(v.AuxInt)
  5497  		sym := auxToSym(v.Aux)
  5498  		ptr := v_0
  5499  		if v_1.Op != OpRISCV64MOVWreg {
  5500  			break
  5501  		}
  5502  		x := v_1.Args[0]
  5503  		mem := v_2
  5504  		v.reset(OpRISCV64MOVHstore)
  5505  		v.AuxInt = int32ToAuxInt(off)
  5506  		v.Aux = symToAux(sym)
  5507  		v.AddArg3(ptr, x, mem)
  5508  		return true
  5509  	}
  5510  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  5511  	// result: (MOVHstore [off] {sym} ptr x mem)
  5512  	for {
  5513  		off := auxIntToInt32(v.AuxInt)
  5514  		sym := auxToSym(v.Aux)
  5515  		ptr := v_0
  5516  		if v_1.Op != OpRISCV64MOVHUreg {
  5517  			break
  5518  		}
  5519  		x := v_1.Args[0]
  5520  		mem := v_2
  5521  		v.reset(OpRISCV64MOVHstore)
  5522  		v.AuxInt = int32ToAuxInt(off)
  5523  		v.Aux = symToAux(sym)
  5524  		v.AddArg3(ptr, x, mem)
  5525  		return true
  5526  	}
  5527  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  5528  	// result: (MOVHstore [off] {sym} ptr x mem)
  5529  	for {
  5530  		off := auxIntToInt32(v.AuxInt)
  5531  		sym := auxToSym(v.Aux)
  5532  		ptr := v_0
  5533  		if v_1.Op != OpRISCV64MOVWUreg {
  5534  			break
  5535  		}
  5536  		x := v_1.Args[0]
  5537  		mem := v_2
  5538  		v.reset(OpRISCV64MOVHstore)
  5539  		v.AuxInt = int32ToAuxInt(off)
  5540  		v.Aux = symToAux(sym)
  5541  		v.AddArg3(ptr, x, mem)
  5542  		return true
  5543  	}
  5544  	return false
  5545  }
  5546  func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
  5547  	v_1 := v.Args[1]
  5548  	v_0 := v.Args[0]
  5549  	b := v.Block
  5550  	config := b.Func.Config
  5551  	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5552  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5553  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5554  	for {
  5555  		off1 := auxIntToInt32(v.AuxInt)
  5556  		sym1 := auxToSym(v.Aux)
  5557  		if v_0.Op != OpRISCV64MOVaddr {
  5558  			break
  5559  		}
  5560  		off2 := auxIntToInt32(v_0.AuxInt)
  5561  		sym2 := auxToSym(v_0.Aux)
  5562  		base := v_0.Args[0]
  5563  		mem := v_1
  5564  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5565  			break
  5566  		}
  5567  		v.reset(OpRISCV64MOVHstorezero)
  5568  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5569  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5570  		v.AddArg2(base, mem)
  5571  		return true
  5572  	}
  5573  	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] base) mem)
  5574  	// cond: is32Bit(int64(off1)+off2)
  5575  	// result: (MOVHstorezero [off1+int32(off2)] {sym} base mem)
  5576  	for {
  5577  		off1 := auxIntToInt32(v.AuxInt)
  5578  		sym := auxToSym(v.Aux)
  5579  		if v_0.Op != OpRISCV64ADDI {
  5580  			break
  5581  		}
  5582  		off2 := auxIntToInt64(v_0.AuxInt)
  5583  		base := v_0.Args[0]
  5584  		mem := v_1
  5585  		if !(is32Bit(int64(off1) + off2)) {
  5586  			break
  5587  		}
  5588  		v.reset(OpRISCV64MOVHstorezero)
  5589  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5590  		v.Aux = symToAux(sym)
  5591  		v.AddArg2(base, mem)
  5592  		return true
  5593  	}
  5594  	return false
  5595  }
  5596  func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
  5597  	v_1 := v.Args[1]
  5598  	v_0 := v.Args[0]
  5599  	b := v.Block
  5600  	config := b.Func.Config
  5601  	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5602  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5603  	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5604  	for {
  5605  		off1 := auxIntToInt32(v.AuxInt)
  5606  		sym1 := auxToSym(v.Aux)
  5607  		if v_0.Op != OpRISCV64MOVaddr {
  5608  			break
  5609  		}
  5610  		off2 := auxIntToInt32(v_0.AuxInt)
  5611  		sym2 := auxToSym(v_0.Aux)
  5612  		base := v_0.Args[0]
  5613  		mem := v_1
  5614  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5615  			break
  5616  		}
  5617  		v.reset(OpRISCV64MOVWUload)
  5618  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5619  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5620  		v.AddArg2(base, mem)
  5621  		return true
  5622  	}
  5623  	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
  5624  	// cond: is32Bit(int64(off1)+off2)
  5625  	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
  5626  	for {
  5627  		off1 := auxIntToInt32(v.AuxInt)
  5628  		sym := auxToSym(v.Aux)
  5629  		if v_0.Op != OpRISCV64ADDI {
  5630  			break
  5631  		}
  5632  		off2 := auxIntToInt64(v_0.AuxInt)
  5633  		base := v_0.Args[0]
  5634  		mem := v_1
  5635  		if !(is32Bit(int64(off1) + off2)) {
  5636  			break
  5637  		}
  5638  		v.reset(OpRISCV64MOVWUload)
  5639  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5640  		v.Aux = symToAux(sym)
  5641  		v.AddArg2(base, mem)
  5642  		return true
  5643  	}
  5644  	return false
  5645  }
  5646  func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
  5647  	v_0 := v.Args[0]
  5648  	b := v.Block
  5649  	typ := &b.Func.Config.Types
  5650  	// match: (MOVWUreg x:(ANDI [c] y))
  5651  	// cond: c >= 0 && int64(uint32(c)) == c
  5652  	// result: x
  5653  	for {
  5654  		x := v_0
  5655  		if x.Op != OpRISCV64ANDI {
  5656  			break
  5657  		}
  5658  		c := auxIntToInt64(x.AuxInt)
  5659  		if !(c >= 0 && int64(uint32(c)) == c) {
  5660  			break
  5661  		}
  5662  		v.copyOf(x)
  5663  		return true
  5664  	}
  5665  	// match: (MOVWUreg (ANDI [c] x))
  5666  	// cond: c < 0
  5667  	// result: (AND (MOVDconst [int64(uint32(c))]) x)
  5668  	for {
  5669  		if v_0.Op != OpRISCV64ANDI {
  5670  			break
  5671  		}
  5672  		c := auxIntToInt64(v_0.AuxInt)
  5673  		x := v_0.Args[0]
  5674  		if !(c < 0) {
  5675  			break
  5676  		}
  5677  		v.reset(OpRISCV64AND)
  5678  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  5679  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
  5680  		v.AddArg2(v0, x)
  5681  		return true
  5682  	}
  5683  	// match: (MOVWUreg (MOVDconst [c]))
  5684  	// result: (MOVDconst [int64(uint32(c))])
  5685  	for {
  5686  		if v_0.Op != OpRISCV64MOVDconst {
  5687  			break
  5688  		}
  5689  		c := auxIntToInt64(v_0.AuxInt)
  5690  		v.reset(OpRISCV64MOVDconst)
  5691  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
  5692  		return true
  5693  	}
  5694  	// match: (MOVWUreg x:(MOVBUload _ _))
  5695  	// result: (MOVDreg x)
  5696  	for {
  5697  		x := v_0
  5698  		if x.Op != OpRISCV64MOVBUload {
  5699  			break
  5700  		}
  5701  		v.reset(OpRISCV64MOVDreg)
  5702  		v.AddArg(x)
  5703  		return true
  5704  	}
  5705  	// match: (MOVWUreg x:(MOVHUload _ _))
  5706  	// result: (MOVDreg x)
  5707  	for {
  5708  		x := v_0
  5709  		if x.Op != OpRISCV64MOVHUload {
  5710  			break
  5711  		}
  5712  		v.reset(OpRISCV64MOVDreg)
  5713  		v.AddArg(x)
  5714  		return true
  5715  	}
  5716  	// match: (MOVWUreg x:(MOVWUload _ _))
  5717  	// result: (MOVDreg x)
  5718  	for {
  5719  		x := v_0
  5720  		if x.Op != OpRISCV64MOVWUload {
  5721  			break
  5722  		}
  5723  		v.reset(OpRISCV64MOVDreg)
  5724  		v.AddArg(x)
  5725  		return true
  5726  	}
  5727  	// match: (MOVWUreg x:(MOVBUreg _))
  5728  	// result: (MOVDreg x)
  5729  	for {
  5730  		x := v_0
  5731  		if x.Op != OpRISCV64MOVBUreg {
  5732  			break
  5733  		}
  5734  		v.reset(OpRISCV64MOVDreg)
  5735  		v.AddArg(x)
  5736  		return true
  5737  	}
  5738  	// match: (MOVWUreg x:(MOVHUreg _))
  5739  	// result: (MOVDreg x)
  5740  	for {
  5741  		x := v_0
  5742  		if x.Op != OpRISCV64MOVHUreg {
  5743  			break
  5744  		}
  5745  		v.reset(OpRISCV64MOVDreg)
  5746  		v.AddArg(x)
  5747  		return true
  5748  	}
  5749  	// match: (MOVWUreg x:(MOVWUreg _))
  5750  	// result: (MOVDreg x)
  5751  	for {
  5752  		x := v_0
  5753  		if x.Op != OpRISCV64MOVWUreg {
  5754  			break
  5755  		}
  5756  		v.reset(OpRISCV64MOVDreg)
  5757  		v.AddArg(x)
  5758  		return true
  5759  	}
  5760  	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
  5761  	// cond: x.Uses == 1 && clobber(x)
  5762  	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
  5763  	for {
  5764  		t := v.Type
  5765  		x := v_0
  5766  		if x.Op != OpRISCV64MOVWload {
  5767  			break
  5768  		}
  5769  		off := auxIntToInt32(x.AuxInt)
  5770  		sym := auxToSym(x.Aux)
  5771  		mem := x.Args[1]
  5772  		ptr := x.Args[0]
  5773  		if !(x.Uses == 1 && clobber(x)) {
  5774  			break
  5775  		}
  5776  		b = x.Block
  5777  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
  5778  		v.copyOf(v0)
  5779  		v0.AuxInt = int32ToAuxInt(off)
  5780  		v0.Aux = symToAux(sym)
  5781  		v0.AddArg2(ptr, mem)
  5782  		return true
  5783  	}
  5784  	return false
  5785  }
  5786  func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
  5787  	v_1 := v.Args[1]
  5788  	v_0 := v.Args[0]
  5789  	b := v.Block
  5790  	config := b.Func.Config
  5791  	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5792  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5793  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5794  	for {
  5795  		off1 := auxIntToInt32(v.AuxInt)
  5796  		sym1 := auxToSym(v.Aux)
  5797  		if v_0.Op != OpRISCV64MOVaddr {
  5798  			break
  5799  		}
  5800  		off2 := auxIntToInt32(v_0.AuxInt)
  5801  		sym2 := auxToSym(v_0.Aux)
  5802  		base := v_0.Args[0]
  5803  		mem := v_1
  5804  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5805  			break
  5806  		}
  5807  		v.reset(OpRISCV64MOVWload)
  5808  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5809  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5810  		v.AddArg2(base, mem)
  5811  		return true
  5812  	}
  5813  	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
  5814  	// cond: is32Bit(int64(off1)+off2)
  5815  	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
  5816  	for {
  5817  		off1 := auxIntToInt32(v.AuxInt)
  5818  		sym := auxToSym(v.Aux)
  5819  		if v_0.Op != OpRISCV64ADDI {
  5820  			break
  5821  		}
  5822  		off2 := auxIntToInt64(v_0.AuxInt)
  5823  		base := v_0.Args[0]
  5824  		mem := v_1
  5825  		if !(is32Bit(int64(off1) + off2)) {
  5826  			break
  5827  		}
  5828  		v.reset(OpRISCV64MOVWload)
  5829  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5830  		v.Aux = symToAux(sym)
  5831  		v.AddArg2(base, mem)
  5832  		return true
  5833  	}
  5834  	return false
  5835  }
  5836  func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
  5837  	v_0 := v.Args[0]
  5838  	b := v.Block
  5839  	// match: (MOVWreg x:(ANDI [c] y))
  5840  	// cond: c >= 0 && int64(int32(c)) == c
  5841  	// result: x
  5842  	for {
  5843  		x := v_0
  5844  		if x.Op != OpRISCV64ANDI {
  5845  			break
  5846  		}
  5847  		c := auxIntToInt64(x.AuxInt)
  5848  		if !(c >= 0 && int64(int32(c)) == c) {
  5849  			break
  5850  		}
  5851  		v.copyOf(x)
  5852  		return true
  5853  	}
  5854  	// match: (MOVWreg (NEG x))
  5855  	// result: (NEGW x)
  5856  	for {
  5857  		if v_0.Op != OpRISCV64NEG {
  5858  			break
  5859  		}
  5860  		x := v_0.Args[0]
  5861  		v.reset(OpRISCV64NEGW)
  5862  		v.AddArg(x)
  5863  		return true
  5864  	}
  5865  	// match: (MOVWreg (MOVDconst [c]))
  5866  	// result: (MOVDconst [int64(int32(c))])
  5867  	for {
  5868  		if v_0.Op != OpRISCV64MOVDconst {
  5869  			break
  5870  		}
  5871  		c := auxIntToInt64(v_0.AuxInt)
  5872  		v.reset(OpRISCV64MOVDconst)
  5873  		v.AuxInt = int64ToAuxInt(int64(int32(c)))
  5874  		return true
  5875  	}
  5876  	// match: (MOVWreg x:(MOVBload _ _))
  5877  	// result: (MOVDreg x)
  5878  	for {
  5879  		x := v_0
  5880  		if x.Op != OpRISCV64MOVBload {
  5881  			break
  5882  		}
  5883  		v.reset(OpRISCV64MOVDreg)
  5884  		v.AddArg(x)
  5885  		return true
  5886  	}
  5887  	// match: (MOVWreg x:(MOVBUload _ _))
  5888  	// result: (MOVDreg x)
  5889  	for {
  5890  		x := v_0
  5891  		if x.Op != OpRISCV64MOVBUload {
  5892  			break
  5893  		}
  5894  		v.reset(OpRISCV64MOVDreg)
  5895  		v.AddArg(x)
  5896  		return true
  5897  	}
  5898  	// match: (MOVWreg x:(MOVHload _ _))
  5899  	// result: (MOVDreg x)
  5900  	for {
  5901  		x := v_0
  5902  		if x.Op != OpRISCV64MOVHload {
  5903  			break
  5904  		}
  5905  		v.reset(OpRISCV64MOVDreg)
  5906  		v.AddArg(x)
  5907  		return true
  5908  	}
  5909  	// match: (MOVWreg x:(MOVHUload _ _))
  5910  	// result: (MOVDreg x)
  5911  	for {
  5912  		x := v_0
  5913  		if x.Op != OpRISCV64MOVHUload {
  5914  			break
  5915  		}
  5916  		v.reset(OpRISCV64MOVDreg)
  5917  		v.AddArg(x)
  5918  		return true
  5919  	}
  5920  	// match: (MOVWreg x:(MOVWload _ _))
  5921  	// result: (MOVDreg x)
  5922  	for {
  5923  		x := v_0
  5924  		if x.Op != OpRISCV64MOVWload {
  5925  			break
  5926  		}
  5927  		v.reset(OpRISCV64MOVDreg)
  5928  		v.AddArg(x)
  5929  		return true
  5930  	}
  5931  	// match: (MOVWreg x:(ADDIW _))
  5932  	// result: (MOVDreg x)
  5933  	for {
  5934  		x := v_0
  5935  		if x.Op != OpRISCV64ADDIW {
  5936  			break
  5937  		}
  5938  		v.reset(OpRISCV64MOVDreg)
  5939  		v.AddArg(x)
  5940  		return true
  5941  	}
  5942  	// match: (MOVWreg x:(SUBW _ _))
  5943  	// result: (MOVDreg x)
  5944  	for {
  5945  		x := v_0
  5946  		if x.Op != OpRISCV64SUBW {
  5947  			break
  5948  		}
  5949  		v.reset(OpRISCV64MOVDreg)
  5950  		v.AddArg(x)
  5951  		return true
  5952  	}
  5953  	// match: (MOVWreg x:(NEGW _))
  5954  	// result: (MOVDreg x)
  5955  	for {
  5956  		x := v_0
  5957  		if x.Op != OpRISCV64NEGW {
  5958  			break
  5959  		}
  5960  		v.reset(OpRISCV64MOVDreg)
  5961  		v.AddArg(x)
  5962  		return true
  5963  	}
  5964  	// match: (MOVWreg x:(MULW _ _))
  5965  	// result: (MOVDreg x)
  5966  	for {
  5967  		x := v_0
  5968  		if x.Op != OpRISCV64MULW {
  5969  			break
  5970  		}
  5971  		v.reset(OpRISCV64MOVDreg)
  5972  		v.AddArg(x)
  5973  		return true
  5974  	}
  5975  	// match: (MOVWreg x:(DIVW _ _))
  5976  	// result: (MOVDreg x)
  5977  	for {
  5978  		x := v_0
  5979  		if x.Op != OpRISCV64DIVW {
  5980  			break
  5981  		}
  5982  		v.reset(OpRISCV64MOVDreg)
  5983  		v.AddArg(x)
  5984  		return true
  5985  	}
  5986  	// match: (MOVWreg x:(DIVUW _ _))
  5987  	// result: (MOVDreg x)
  5988  	for {
  5989  		x := v_0
  5990  		if x.Op != OpRISCV64DIVUW {
  5991  			break
  5992  		}
  5993  		v.reset(OpRISCV64MOVDreg)
  5994  		v.AddArg(x)
  5995  		return true
  5996  	}
  5997  	// match: (MOVWreg x:(REMW _ _))
  5998  	// result: (MOVDreg x)
  5999  	for {
  6000  		x := v_0
  6001  		if x.Op != OpRISCV64REMW {
  6002  			break
  6003  		}
  6004  		v.reset(OpRISCV64MOVDreg)
  6005  		v.AddArg(x)
  6006  		return true
  6007  	}
  6008  	// match: (MOVWreg x:(REMUW _ _))
  6009  	// result: (MOVDreg x)
  6010  	for {
  6011  		x := v_0
  6012  		if x.Op != OpRISCV64REMUW {
  6013  			break
  6014  		}
  6015  		v.reset(OpRISCV64MOVDreg)
  6016  		v.AddArg(x)
  6017  		return true
  6018  	}
  6019  	// match: (MOVWreg x:(ROLW _ _))
  6020  	// result: (MOVDreg x)
  6021  	for {
  6022  		x := v_0
  6023  		if x.Op != OpRISCV64ROLW {
  6024  			break
  6025  		}
  6026  		v.reset(OpRISCV64MOVDreg)
  6027  		v.AddArg(x)
  6028  		return true
  6029  	}
  6030  	// match: (MOVWreg x:(RORW _ _))
  6031  	// result: (MOVDreg x)
  6032  	for {
  6033  		x := v_0
  6034  		if x.Op != OpRISCV64RORW {
  6035  			break
  6036  		}
  6037  		v.reset(OpRISCV64MOVDreg)
  6038  		v.AddArg(x)
  6039  		return true
  6040  	}
  6041  	// match: (MOVWreg x:(RORIW _))
  6042  	// result: (MOVDreg x)
  6043  	for {
  6044  		x := v_0
  6045  		if x.Op != OpRISCV64RORIW {
  6046  			break
  6047  		}
  6048  		v.reset(OpRISCV64MOVDreg)
  6049  		v.AddArg(x)
  6050  		return true
  6051  	}
  6052  	// match: (MOVWreg x:(MOVBreg _))
  6053  	// result: (MOVDreg x)
  6054  	for {
  6055  		x := v_0
  6056  		if x.Op != OpRISCV64MOVBreg {
  6057  			break
  6058  		}
  6059  		v.reset(OpRISCV64MOVDreg)
  6060  		v.AddArg(x)
  6061  		return true
  6062  	}
  6063  	// match: (MOVWreg x:(MOVBUreg _))
  6064  	// result: (MOVDreg x)
  6065  	for {
  6066  		x := v_0
  6067  		if x.Op != OpRISCV64MOVBUreg {
  6068  			break
  6069  		}
  6070  		v.reset(OpRISCV64MOVDreg)
  6071  		v.AddArg(x)
  6072  		return true
  6073  	}
  6074  	// match: (MOVWreg x:(MOVHreg _))
  6075  	// result: (MOVDreg x)
  6076  	for {
  6077  		x := v_0
  6078  		if x.Op != OpRISCV64MOVHreg {
  6079  			break
  6080  		}
  6081  		v.reset(OpRISCV64MOVDreg)
  6082  		v.AddArg(x)
  6083  		return true
  6084  	}
  6085  	// match: (MOVWreg x:(MOVWreg _))
  6086  	// result: (MOVDreg x)
  6087  	for {
  6088  		x := v_0
  6089  		if x.Op != OpRISCV64MOVWreg {
  6090  			break
  6091  		}
  6092  		v.reset(OpRISCV64MOVDreg)
  6093  		v.AddArg(x)
  6094  		return true
  6095  	}
  6096  	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
  6097  	// cond: x.Uses == 1 && clobber(x)
  6098  	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
  6099  	for {
  6100  		t := v.Type
  6101  		x := v_0
  6102  		if x.Op != OpRISCV64MOVWUload {
  6103  			break
  6104  		}
  6105  		off := auxIntToInt32(x.AuxInt)
  6106  		sym := auxToSym(x.Aux)
  6107  		mem := x.Args[1]
  6108  		ptr := x.Args[0]
  6109  		if !(x.Uses == 1 && clobber(x)) {
  6110  			break
  6111  		}
  6112  		b = x.Block
  6113  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
  6114  		v.copyOf(v0)
  6115  		v0.AuxInt = int32ToAuxInt(off)
  6116  		v0.Aux = symToAux(sym)
  6117  		v0.AddArg2(ptr, mem)
  6118  		return true
  6119  	}
  6120  	return false
  6121  }
  6122  func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
  6123  	v_2 := v.Args[2]
  6124  	v_1 := v.Args[1]
  6125  	v_0 := v.Args[0]
  6126  	b := v.Block
  6127  	config := b.Func.Config
  6128  	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  6129  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6130  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  6131  	for {
  6132  		off1 := auxIntToInt32(v.AuxInt)
  6133  		sym1 := auxToSym(v.Aux)
  6134  		if v_0.Op != OpRISCV64MOVaddr {
  6135  			break
  6136  		}
  6137  		off2 := auxIntToInt32(v_0.AuxInt)
  6138  		sym2 := auxToSym(v_0.Aux)
  6139  		base := v_0.Args[0]
  6140  		val := v_1
  6141  		mem := v_2
  6142  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6143  			break
  6144  		}
  6145  		v.reset(OpRISCV64MOVWstore)
  6146  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6147  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6148  		v.AddArg3(base, val, mem)
  6149  		return true
  6150  	}
  6151  	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
  6152  	// cond: is32Bit(int64(off1)+off2)
  6153  	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
  6154  	for {
  6155  		off1 := auxIntToInt32(v.AuxInt)
  6156  		sym := auxToSym(v.Aux)
  6157  		if v_0.Op != OpRISCV64ADDI {
  6158  			break
  6159  		}
  6160  		off2 := auxIntToInt64(v_0.AuxInt)
  6161  		base := v_0.Args[0]
  6162  		val := v_1
  6163  		mem := v_2
  6164  		if !(is32Bit(int64(off1) + off2)) {
  6165  			break
  6166  		}
  6167  		v.reset(OpRISCV64MOVWstore)
  6168  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6169  		v.Aux = symToAux(sym)
  6170  		v.AddArg3(base, val, mem)
  6171  		return true
  6172  	}
  6173  	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
  6174  	// result: (MOVWstorezero [off] {sym} ptr mem)
  6175  	for {
  6176  		off := auxIntToInt32(v.AuxInt)
  6177  		sym := auxToSym(v.Aux)
  6178  		ptr := v_0
  6179  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  6180  			break
  6181  		}
  6182  		mem := v_2
  6183  		v.reset(OpRISCV64MOVWstorezero)
  6184  		v.AuxInt = int32ToAuxInt(off)
  6185  		v.Aux = symToAux(sym)
  6186  		v.AddArg2(ptr, mem)
  6187  		return true
  6188  	}
  6189  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  6190  	// result: (MOVWstore [off] {sym} ptr x mem)
  6191  	for {
  6192  		off := auxIntToInt32(v.AuxInt)
  6193  		sym := auxToSym(v.Aux)
  6194  		ptr := v_0
  6195  		if v_1.Op != OpRISCV64MOVWreg {
  6196  			break
  6197  		}
  6198  		x := v_1.Args[0]
  6199  		mem := v_2
  6200  		v.reset(OpRISCV64MOVWstore)
  6201  		v.AuxInt = int32ToAuxInt(off)
  6202  		v.Aux = symToAux(sym)
  6203  		v.AddArg3(ptr, x, mem)
  6204  		return true
  6205  	}
  6206  	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
  6207  	// result: (MOVWstore [off] {sym} ptr x mem)
  6208  	for {
  6209  		off := auxIntToInt32(v.AuxInt)
  6210  		sym := auxToSym(v.Aux)
  6211  		ptr := v_0
  6212  		if v_1.Op != OpRISCV64MOVWUreg {
  6213  			break
  6214  		}
  6215  		x := v_1.Args[0]
  6216  		mem := v_2
  6217  		v.reset(OpRISCV64MOVWstore)
  6218  		v.AuxInt = int32ToAuxInt(off)
  6219  		v.Aux = symToAux(sym)
  6220  		v.AddArg3(ptr, x, mem)
  6221  		return true
  6222  	}
  6223  	return false
  6224  }
  6225  func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
  6226  	v_1 := v.Args[1]
  6227  	v_0 := v.Args[0]
  6228  	b := v.Block
  6229  	config := b.Func.Config
  6230  	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6231  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6232  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6233  	for {
  6234  		off1 := auxIntToInt32(v.AuxInt)
  6235  		sym1 := auxToSym(v.Aux)
  6236  		if v_0.Op != OpRISCV64MOVaddr {
  6237  			break
  6238  		}
  6239  		off2 := auxIntToInt32(v_0.AuxInt)
  6240  		sym2 := auxToSym(v_0.Aux)
  6241  		base := v_0.Args[0]
  6242  		mem := v_1
  6243  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6244  			break
  6245  		}
  6246  		v.reset(OpRISCV64MOVWstorezero)
  6247  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6248  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6249  		v.AddArg2(base, mem)
  6250  		return true
  6251  	}
  6252  	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] base) mem)
  6253  	// cond: is32Bit(int64(off1)+off2)
  6254  	// result: (MOVWstorezero [off1+int32(off2)] {sym} base mem)
  6255  	for {
  6256  		off1 := auxIntToInt32(v.AuxInt)
  6257  		sym := auxToSym(v.Aux)
  6258  		if v_0.Op != OpRISCV64ADDI {
  6259  			break
  6260  		}
  6261  		off2 := auxIntToInt64(v_0.AuxInt)
  6262  		base := v_0.Args[0]
  6263  		mem := v_1
  6264  		if !(is32Bit(int64(off1) + off2)) {
  6265  			break
  6266  		}
  6267  		v.reset(OpRISCV64MOVWstorezero)
  6268  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6269  		v.Aux = symToAux(sym)
  6270  		v.AddArg2(base, mem)
  6271  		return true
  6272  	}
  6273  	return false
  6274  }
  6275  func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
  6276  	v_0 := v.Args[0]
  6277  	b := v.Block
  6278  	// match: (NEG (SUB x y))
  6279  	// result: (SUB y x)
  6280  	for {
  6281  		if v_0.Op != OpRISCV64SUB {
  6282  			break
  6283  		}
  6284  		y := v_0.Args[1]
  6285  		x := v_0.Args[0]
  6286  		v.reset(OpRISCV64SUB)
  6287  		v.AddArg2(y, x)
  6288  		return true
  6289  	}
  6290  	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
  6291  	// cond: s.Uses == 1 && is32Bit(-val)
  6292  	// result: (ADDI [-val] (SUB <t> y x))
  6293  	for {
  6294  		t := v.Type
  6295  		s := v_0
  6296  		if s.Op != OpRISCV64ADDI {
  6297  			break
  6298  		}
  6299  		val := auxIntToInt64(s.AuxInt)
  6300  		s_0 := s.Args[0]
  6301  		if s_0.Op != OpRISCV64SUB {
  6302  			break
  6303  		}
  6304  		y := s_0.Args[1]
  6305  		x := s_0.Args[0]
  6306  		if !(s.Uses == 1 && is32Bit(-val)) {
  6307  			break
  6308  		}
  6309  		v.reset(OpRISCV64ADDI)
  6310  		v.AuxInt = int64ToAuxInt(-val)
  6311  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
  6312  		v0.AddArg2(y, x)
  6313  		v.AddArg(v0)
  6314  		return true
  6315  	}
  6316  	// match: (NEG (NEG x))
  6317  	// result: x
  6318  	for {
  6319  		if v_0.Op != OpRISCV64NEG {
  6320  			break
  6321  		}
  6322  		x := v_0.Args[0]
  6323  		v.copyOf(x)
  6324  		return true
  6325  	}
  6326  	// match: (NEG <t> s:(ADDI [val] (NEG x)))
  6327  	// cond: s.Uses == 1 && is32Bit(-val)
  6328  	// result: (ADDI [-val] x)
  6329  	for {
  6330  		s := v_0
  6331  		if s.Op != OpRISCV64ADDI {
  6332  			break
  6333  		}
  6334  		val := auxIntToInt64(s.AuxInt)
  6335  		s_0 := s.Args[0]
  6336  		if s_0.Op != OpRISCV64NEG {
  6337  			break
  6338  		}
  6339  		x := s_0.Args[0]
  6340  		if !(s.Uses == 1 && is32Bit(-val)) {
  6341  			break
  6342  		}
  6343  		v.reset(OpRISCV64ADDI)
  6344  		v.AuxInt = int64ToAuxInt(-val)
  6345  		v.AddArg(x)
  6346  		return true
  6347  	}
  6348  	// match: (NEG (MOVDconst [x]))
  6349  	// result: (MOVDconst [-x])
  6350  	for {
  6351  		if v_0.Op != OpRISCV64MOVDconst {
  6352  			break
  6353  		}
  6354  		x := auxIntToInt64(v_0.AuxInt)
  6355  		v.reset(OpRISCV64MOVDconst)
  6356  		v.AuxInt = int64ToAuxInt(-x)
  6357  		return true
  6358  	}
  6359  	return false
  6360  }
  6361  func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
  6362  	v_0 := v.Args[0]
  6363  	// match: (NEGW (MOVDconst [x]))
  6364  	// result: (MOVDconst [int64(int32(-x))])
  6365  	for {
  6366  		if v_0.Op != OpRISCV64MOVDconst {
  6367  			break
  6368  		}
  6369  		x := auxIntToInt64(v_0.AuxInt)
  6370  		v.reset(OpRISCV64MOVDconst)
  6371  		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
  6372  		return true
  6373  	}
  6374  	return false
  6375  }
  6376  func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
  6377  	v_1 := v.Args[1]
  6378  	v_0 := v.Args[0]
  6379  	// match: (OR (MOVDconst [val]) x)
  6380  	// cond: is32Bit(val)
  6381  	// result: (ORI [val] x)
  6382  	for {
  6383  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  6384  			if v_0.Op != OpRISCV64MOVDconst {
  6385  				continue
  6386  			}
  6387  			val := auxIntToInt64(v_0.AuxInt)
  6388  			x := v_1
  6389  			if !(is32Bit(val)) {
  6390  				continue
  6391  			}
  6392  			v.reset(OpRISCV64ORI)
  6393  			v.AuxInt = int64ToAuxInt(val)
  6394  			v.AddArg(x)
  6395  			return true
  6396  		}
  6397  		break
  6398  	}
  6399  	return false
  6400  }
  6401  func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
  6402  	v_0 := v.Args[0]
  6403  	// match: (ORI [0] x)
  6404  	// result: x
  6405  	for {
  6406  		if auxIntToInt64(v.AuxInt) != 0 {
  6407  			break
  6408  		}
  6409  		x := v_0
  6410  		v.copyOf(x)
  6411  		return true
  6412  	}
  6413  	// match: (ORI [-1] x)
  6414  	// result: (MOVDconst [-1])
  6415  	for {
  6416  		if auxIntToInt64(v.AuxInt) != -1 {
  6417  			break
  6418  		}
  6419  		v.reset(OpRISCV64MOVDconst)
  6420  		v.AuxInt = int64ToAuxInt(-1)
  6421  		return true
  6422  	}
  6423  	// match: (ORI [x] (MOVDconst [y]))
  6424  	// result: (MOVDconst [x | y])
  6425  	for {
  6426  		x := auxIntToInt64(v.AuxInt)
  6427  		if v_0.Op != OpRISCV64MOVDconst {
  6428  			break
  6429  		}
  6430  		y := auxIntToInt64(v_0.AuxInt)
  6431  		v.reset(OpRISCV64MOVDconst)
  6432  		v.AuxInt = int64ToAuxInt(x | y)
  6433  		return true
  6434  	}
  6435  	// match: (ORI [x] (ORI [y] z))
  6436  	// result: (ORI [x | y] z)
  6437  	for {
  6438  		x := auxIntToInt64(v.AuxInt)
  6439  		if v_0.Op != OpRISCV64ORI {
  6440  			break
  6441  		}
  6442  		y := auxIntToInt64(v_0.AuxInt)
  6443  		z := v_0.Args[0]
  6444  		v.reset(OpRISCV64ORI)
  6445  		v.AuxInt = int64ToAuxInt(x | y)
  6446  		v.AddArg(z)
  6447  		return true
  6448  	}
  6449  	return false
  6450  }
  6451  func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
  6452  	v_1 := v.Args[1]
  6453  	v_0 := v.Args[0]
  6454  	// match: (ROL x (MOVDconst [val]))
  6455  	// result: (RORI [int64(int8(-val)&63)] x)
  6456  	for {
  6457  		x := v_0
  6458  		if v_1.Op != OpRISCV64MOVDconst {
  6459  			break
  6460  		}
  6461  		val := auxIntToInt64(v_1.AuxInt)
  6462  		v.reset(OpRISCV64RORI)
  6463  		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63))
  6464  		v.AddArg(x)
  6465  		return true
  6466  	}
  6467  	// match: (ROL x (NEG y))
  6468  	// result: (ROR x y)
  6469  	for {
  6470  		x := v_0
  6471  		if v_1.Op != OpRISCV64NEG {
  6472  			break
  6473  		}
  6474  		y := v_1.Args[0]
  6475  		v.reset(OpRISCV64ROR)
  6476  		v.AddArg2(x, y)
  6477  		return true
  6478  	}
  6479  	return false
  6480  }
  6481  func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
  6482  	v_1 := v.Args[1]
  6483  	v_0 := v.Args[0]
  6484  	// match: (ROLW x (MOVDconst [val]))
  6485  	// result: (RORIW [int64(int8(-val)&31)] x)
  6486  	for {
  6487  		x := v_0
  6488  		if v_1.Op != OpRISCV64MOVDconst {
  6489  			break
  6490  		}
  6491  		val := auxIntToInt64(v_1.AuxInt)
  6492  		v.reset(OpRISCV64RORIW)
  6493  		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31))
  6494  		v.AddArg(x)
  6495  		return true
  6496  	}
  6497  	// match: (ROLW x (NEG y))
  6498  	// result: (RORW x y)
  6499  	for {
  6500  		x := v_0
  6501  		if v_1.Op != OpRISCV64NEG {
  6502  			break
  6503  		}
  6504  		y := v_1.Args[0]
  6505  		v.reset(OpRISCV64RORW)
  6506  		v.AddArg2(x, y)
  6507  		return true
  6508  	}
  6509  	return false
  6510  }
  6511  func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
  6512  	v_1 := v.Args[1]
  6513  	v_0 := v.Args[0]
  6514  	// match: (ROR x (MOVDconst [val]))
  6515  	// result: (RORI [int64(val&63)] x)
  6516  	for {
  6517  		x := v_0
  6518  		if v_1.Op != OpRISCV64MOVDconst {
  6519  			break
  6520  		}
  6521  		val := auxIntToInt64(v_1.AuxInt)
  6522  		v.reset(OpRISCV64RORI)
  6523  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  6524  		v.AddArg(x)
  6525  		return true
  6526  	}
  6527  	return false
  6528  }
  6529  func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
  6530  	v_1 := v.Args[1]
  6531  	v_0 := v.Args[0]
  6532  	// match: (RORW x (MOVDconst [val]))
  6533  	// result: (RORIW [int64(val&31)] x)
  6534  	for {
  6535  		x := v_0
  6536  		if v_1.Op != OpRISCV64MOVDconst {
  6537  			break
  6538  		}
  6539  		val := auxIntToInt64(v_1.AuxInt)
  6540  		v.reset(OpRISCV64RORIW)
  6541  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  6542  		v.AddArg(x)
  6543  		return true
  6544  	}
  6545  	return false
  6546  }
  6547  func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
  6548  	v_0 := v.Args[0]
  6549  	// match: (SEQZ (NEG x))
  6550  	// result: (SEQZ x)
  6551  	for {
  6552  		if v_0.Op != OpRISCV64NEG {
  6553  			break
  6554  		}
  6555  		x := v_0.Args[0]
  6556  		v.reset(OpRISCV64SEQZ)
  6557  		v.AddArg(x)
  6558  		return true
  6559  	}
  6560  	// match: (SEQZ (SEQZ x))
  6561  	// result: (SNEZ x)
  6562  	for {
  6563  		if v_0.Op != OpRISCV64SEQZ {
  6564  			break
  6565  		}
  6566  		x := v_0.Args[0]
  6567  		v.reset(OpRISCV64SNEZ)
  6568  		v.AddArg(x)
  6569  		return true
  6570  	}
  6571  	// match: (SEQZ (SNEZ x))
  6572  	// result: (SEQZ x)
  6573  	for {
  6574  		if v_0.Op != OpRISCV64SNEZ {
  6575  			break
  6576  		}
  6577  		x := v_0.Args[0]
  6578  		v.reset(OpRISCV64SEQZ)
  6579  		v.AddArg(x)
  6580  		return true
  6581  	}
  6582  	return false
  6583  }
  6584  func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
  6585  	v_1 := v.Args[1]
  6586  	v_0 := v.Args[0]
  6587  	// match: (SLL x (MOVDconst [val]))
  6588  	// result: (SLLI [int64(val&63)] x)
  6589  	for {
  6590  		x := v_0
  6591  		if v_1.Op != OpRISCV64MOVDconst {
  6592  			break
  6593  		}
  6594  		val := auxIntToInt64(v_1.AuxInt)
  6595  		v.reset(OpRISCV64SLLI)
  6596  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  6597  		v.AddArg(x)
  6598  		return true
  6599  	}
  6600  	return false
  6601  }
  6602  func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
  6603  	v_0 := v.Args[0]
  6604  	// match: (SLLI [x] (MOVDconst [y]))
  6605  	// cond: is32Bit(y << uint32(x))
  6606  	// result: (MOVDconst [y << uint32(x)])
  6607  	for {
  6608  		x := auxIntToInt64(v.AuxInt)
  6609  		if v_0.Op != OpRISCV64MOVDconst {
  6610  			break
  6611  		}
  6612  		y := auxIntToInt64(v_0.AuxInt)
  6613  		if !(is32Bit(y << uint32(x))) {
  6614  			break
  6615  		}
  6616  		v.reset(OpRISCV64MOVDconst)
  6617  		v.AuxInt = int64ToAuxInt(y << uint32(x))
  6618  		return true
  6619  	}
  6620  	return false
  6621  }
  6622  func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
  6623  	v_1 := v.Args[1]
  6624  	v_0 := v.Args[0]
  6625  	// match: (SLLW x (MOVDconst [val]))
  6626  	// result: (SLLIW [int64(val&31)] x)
  6627  	for {
  6628  		x := v_0
  6629  		if v_1.Op != OpRISCV64MOVDconst {
  6630  			break
  6631  		}
  6632  		val := auxIntToInt64(v_1.AuxInt)
  6633  		v.reset(OpRISCV64SLLIW)
  6634  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  6635  		v.AddArg(x)
  6636  		return true
  6637  	}
  6638  	return false
  6639  }
  6640  func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
  6641  	v_1 := v.Args[1]
  6642  	v_0 := v.Args[0]
  6643  	// match: (SLT x (MOVDconst [val]))
  6644  	// cond: val >= -2048 && val <= 2047
  6645  	// result: (SLTI [val] x)
  6646  	for {
  6647  		x := v_0
  6648  		if v_1.Op != OpRISCV64MOVDconst {
  6649  			break
  6650  		}
  6651  		val := auxIntToInt64(v_1.AuxInt)
  6652  		if !(val >= -2048 && val <= 2047) {
  6653  			break
  6654  		}
  6655  		v.reset(OpRISCV64SLTI)
  6656  		v.AuxInt = int64ToAuxInt(val)
  6657  		v.AddArg(x)
  6658  		return true
  6659  	}
  6660  	// match: (SLT x x)
  6661  	// result: (MOVDconst [0])
  6662  	for {
  6663  		x := v_0
  6664  		if x != v_1 {
  6665  			break
  6666  		}
  6667  		v.reset(OpRISCV64MOVDconst)
  6668  		v.AuxInt = int64ToAuxInt(0)
  6669  		return true
  6670  	}
  6671  	return false
  6672  }
  6673  func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
  6674  	v_0 := v.Args[0]
  6675  	// match: (SLTI [x] (MOVDconst [y]))
  6676  	// result: (MOVDconst [b2i(int64(y) < int64(x))])
  6677  	for {
  6678  		x := auxIntToInt64(v.AuxInt)
  6679  		if v_0.Op != OpRISCV64MOVDconst {
  6680  			break
  6681  		}
  6682  		y := auxIntToInt64(v_0.AuxInt)
  6683  		v.reset(OpRISCV64MOVDconst)
  6684  		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
  6685  		return true
  6686  	}
  6687  	// match: (SLTI [x] (ANDI [y] _))
  6688  	// cond: y >= 0 && int64(y) < int64(x)
  6689  	// result: (MOVDconst [1])
  6690  	for {
  6691  		x := auxIntToInt64(v.AuxInt)
  6692  		if v_0.Op != OpRISCV64ANDI {
  6693  			break
  6694  		}
  6695  		y := auxIntToInt64(v_0.AuxInt)
  6696  		if !(y >= 0 && int64(y) < int64(x)) {
  6697  			break
  6698  		}
  6699  		v.reset(OpRISCV64MOVDconst)
  6700  		v.AuxInt = int64ToAuxInt(1)
  6701  		return true
  6702  	}
  6703  	// match: (SLTI [x] (ORI [y] _))
  6704  	// cond: y >= 0 && int64(y) >= int64(x)
  6705  	// result: (MOVDconst [0])
  6706  	for {
  6707  		x := auxIntToInt64(v.AuxInt)
  6708  		if v_0.Op != OpRISCV64ORI {
  6709  			break
  6710  		}
  6711  		y := auxIntToInt64(v_0.AuxInt)
  6712  		if !(y >= 0 && int64(y) >= int64(x)) {
  6713  			break
  6714  		}
  6715  		v.reset(OpRISCV64MOVDconst)
  6716  		v.AuxInt = int64ToAuxInt(0)
  6717  		return true
  6718  	}
  6719  	return false
  6720  }
  6721  func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
  6722  	v_0 := v.Args[0]
  6723  	// match: (SLTIU [x] (MOVDconst [y]))
  6724  	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
  6725  	for {
  6726  		x := auxIntToInt64(v.AuxInt)
  6727  		if v_0.Op != OpRISCV64MOVDconst {
  6728  			break
  6729  		}
  6730  		y := auxIntToInt64(v_0.AuxInt)
  6731  		v.reset(OpRISCV64MOVDconst)
  6732  		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
  6733  		return true
  6734  	}
  6735  	// match: (SLTIU [x] (ANDI [y] _))
  6736  	// cond: y >= 0 && uint64(y) < uint64(x)
  6737  	// result: (MOVDconst [1])
  6738  	for {
  6739  		x := auxIntToInt64(v.AuxInt)
  6740  		if v_0.Op != OpRISCV64ANDI {
  6741  			break
  6742  		}
  6743  		y := auxIntToInt64(v_0.AuxInt)
  6744  		if !(y >= 0 && uint64(y) < uint64(x)) {
  6745  			break
  6746  		}
  6747  		v.reset(OpRISCV64MOVDconst)
  6748  		v.AuxInt = int64ToAuxInt(1)
  6749  		return true
  6750  	}
  6751  	// match: (SLTIU [x] (ORI [y] _))
  6752  	// cond: y >= 0 && uint64(y) >= uint64(x)
  6753  	// result: (MOVDconst [0])
  6754  	for {
  6755  		x := auxIntToInt64(v.AuxInt)
  6756  		if v_0.Op != OpRISCV64ORI {
  6757  			break
  6758  		}
  6759  		y := auxIntToInt64(v_0.AuxInt)
  6760  		if !(y >= 0 && uint64(y) >= uint64(x)) {
  6761  			break
  6762  		}
  6763  		v.reset(OpRISCV64MOVDconst)
  6764  		v.AuxInt = int64ToAuxInt(0)
  6765  		return true
  6766  	}
  6767  	return false
  6768  }
  6769  func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
  6770  	v_1 := v.Args[1]
  6771  	v_0 := v.Args[0]
  6772  	// match: (SLTU x (MOVDconst [val]))
  6773  	// cond: val >= -2048 && val <= 2047
  6774  	// result: (SLTIU [val] x)
  6775  	for {
  6776  		x := v_0
  6777  		if v_1.Op != OpRISCV64MOVDconst {
  6778  			break
  6779  		}
  6780  		val := auxIntToInt64(v_1.AuxInt)
  6781  		if !(val >= -2048 && val <= 2047) {
  6782  			break
  6783  		}
  6784  		v.reset(OpRISCV64SLTIU)
  6785  		v.AuxInt = int64ToAuxInt(val)
  6786  		v.AddArg(x)
  6787  		return true
  6788  	}
  6789  	// match: (SLTU x x)
  6790  	// result: (MOVDconst [0])
  6791  	for {
  6792  		x := v_0
  6793  		if x != v_1 {
  6794  			break
  6795  		}
  6796  		v.reset(OpRISCV64MOVDconst)
  6797  		v.AuxInt = int64ToAuxInt(0)
  6798  		return true
  6799  	}
  6800  	return false
  6801  }
  6802  func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
  6803  	v_0 := v.Args[0]
  6804  	// match: (SNEZ (NEG x))
  6805  	// result: (SNEZ x)
  6806  	for {
  6807  		if v_0.Op != OpRISCV64NEG {
  6808  			break
  6809  		}
  6810  		x := v_0.Args[0]
  6811  		v.reset(OpRISCV64SNEZ)
  6812  		v.AddArg(x)
  6813  		return true
  6814  	}
  6815  	// match: (SNEZ (SEQZ x))
  6816  	// result: (SEQZ x)
  6817  	for {
  6818  		if v_0.Op != OpRISCV64SEQZ {
  6819  			break
  6820  		}
  6821  		x := v_0.Args[0]
  6822  		v.reset(OpRISCV64SEQZ)
  6823  		v.AddArg(x)
  6824  		return true
  6825  	}
  6826  	// match: (SNEZ (SNEZ x))
  6827  	// result: (SNEZ x)
  6828  	for {
  6829  		if v_0.Op != OpRISCV64SNEZ {
  6830  			break
  6831  		}
  6832  		x := v_0.Args[0]
  6833  		v.reset(OpRISCV64SNEZ)
  6834  		v.AddArg(x)
  6835  		return true
  6836  	}
  6837  	return false
  6838  }
  6839  func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
  6840  	v_1 := v.Args[1]
  6841  	v_0 := v.Args[0]
  6842  	// match: (SRA x (MOVDconst [val]))
  6843  	// result: (SRAI [int64(val&63)] x)
  6844  	for {
  6845  		x := v_0
  6846  		if v_1.Op != OpRISCV64MOVDconst {
  6847  			break
  6848  		}
  6849  		val := auxIntToInt64(v_1.AuxInt)
  6850  		v.reset(OpRISCV64SRAI)
  6851  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  6852  		v.AddArg(x)
  6853  		return true
  6854  	}
  6855  	return false
  6856  }
  6857  func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
  6858  	v_0 := v.Args[0]
  6859  	b := v.Block
  6860  	// match: (SRAI <t> [x] (MOVWreg y))
  6861  	// cond: x >= 0 && x <= 31
  6862  	// result: (SRAIW <t> [int64(x)] y)
  6863  	for {
  6864  		t := v.Type
  6865  		x := auxIntToInt64(v.AuxInt)
  6866  		if v_0.Op != OpRISCV64MOVWreg {
  6867  			break
  6868  		}
  6869  		y := v_0.Args[0]
  6870  		if !(x >= 0 && x <= 31) {
  6871  			break
  6872  		}
  6873  		v.reset(OpRISCV64SRAIW)
  6874  		v.Type = t
  6875  		v.AuxInt = int64ToAuxInt(int64(x))
  6876  		v.AddArg(y)
  6877  		return true
  6878  	}
  6879  	// match: (SRAI <t> [x] (MOVBreg y))
  6880  	// cond: x >= 8
  6881  	// result: (SRAI [63] (SLLI <t> [56] y))
  6882  	for {
  6883  		t := v.Type
  6884  		x := auxIntToInt64(v.AuxInt)
  6885  		if v_0.Op != OpRISCV64MOVBreg {
  6886  			break
  6887  		}
  6888  		y := v_0.Args[0]
  6889  		if !(x >= 8) {
  6890  			break
  6891  		}
  6892  		v.reset(OpRISCV64SRAI)
  6893  		v.AuxInt = int64ToAuxInt(63)
  6894  		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
  6895  		v0.AuxInt = int64ToAuxInt(56)
  6896  		v0.AddArg(y)
  6897  		v.AddArg(v0)
  6898  		return true
  6899  	}
  6900  	// match: (SRAI <t> [x] (MOVHreg y))
  6901  	// cond: x >= 16
  6902  	// result: (SRAI [63] (SLLI <t> [48] y))
  6903  	for {
  6904  		t := v.Type
  6905  		x := auxIntToInt64(v.AuxInt)
  6906  		if v_0.Op != OpRISCV64MOVHreg {
  6907  			break
  6908  		}
  6909  		y := v_0.Args[0]
  6910  		if !(x >= 16) {
  6911  			break
  6912  		}
  6913  		v.reset(OpRISCV64SRAI)
  6914  		v.AuxInt = int64ToAuxInt(63)
  6915  		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
  6916  		v0.AuxInt = int64ToAuxInt(48)
  6917  		v0.AddArg(y)
  6918  		v.AddArg(v0)
  6919  		return true
  6920  	}
  6921  	// match: (SRAI <t> [x] (MOVWreg y))
  6922  	// cond: x >= 32
  6923  	// result: (SRAIW [31] y)
  6924  	for {
  6925  		x := auxIntToInt64(v.AuxInt)
  6926  		if v_0.Op != OpRISCV64MOVWreg {
  6927  			break
  6928  		}
  6929  		y := v_0.Args[0]
  6930  		if !(x >= 32) {
  6931  			break
  6932  		}
  6933  		v.reset(OpRISCV64SRAIW)
  6934  		v.AuxInt = int64ToAuxInt(31)
  6935  		v.AddArg(y)
  6936  		return true
  6937  	}
  6938  	// match: (SRAI [x] (MOVDconst [y]))
  6939  	// result: (MOVDconst [int64(y) >> uint32(x)])
  6940  	for {
  6941  		x := auxIntToInt64(v.AuxInt)
  6942  		if v_0.Op != OpRISCV64MOVDconst {
  6943  			break
  6944  		}
  6945  		y := auxIntToInt64(v_0.AuxInt)
  6946  		v.reset(OpRISCV64MOVDconst)
  6947  		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
  6948  		return true
  6949  	}
  6950  	return false
  6951  }
  6952  func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
  6953  	v_1 := v.Args[1]
  6954  	v_0 := v.Args[0]
  6955  	// match: (SRAW x (MOVDconst [val]))
  6956  	// result: (SRAIW [int64(val&31)] x)
  6957  	for {
  6958  		x := v_0
  6959  		if v_1.Op != OpRISCV64MOVDconst {
  6960  			break
  6961  		}
  6962  		val := auxIntToInt64(v_1.AuxInt)
  6963  		v.reset(OpRISCV64SRAIW)
  6964  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  6965  		v.AddArg(x)
  6966  		return true
  6967  	}
  6968  	return false
  6969  }
  6970  func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
  6971  	v_1 := v.Args[1]
  6972  	v_0 := v.Args[0]
  6973  	// match: (SRL x (MOVDconst [val]))
  6974  	// result: (SRLI [int64(val&63)] x)
  6975  	for {
  6976  		x := v_0
  6977  		if v_1.Op != OpRISCV64MOVDconst {
  6978  			break
  6979  		}
  6980  		val := auxIntToInt64(v_1.AuxInt)
  6981  		v.reset(OpRISCV64SRLI)
  6982  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  6983  		v.AddArg(x)
  6984  		return true
  6985  	}
  6986  	return false
  6987  }
  6988  func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
  6989  	v_0 := v.Args[0]
  6990  	// match: (SRLI <t> [x] (MOVWUreg y))
  6991  	// cond: x >= 0 && x <= 31
  6992  	// result: (SRLIW <t> [int64(x)] y)
  6993  	for {
  6994  		t := v.Type
  6995  		x := auxIntToInt64(v.AuxInt)
  6996  		if v_0.Op != OpRISCV64MOVWUreg {
  6997  			break
  6998  		}
  6999  		y := v_0.Args[0]
  7000  		if !(x >= 0 && x <= 31) {
  7001  			break
  7002  		}
  7003  		v.reset(OpRISCV64SRLIW)
  7004  		v.Type = t
  7005  		v.AuxInt = int64ToAuxInt(int64(x))
  7006  		v.AddArg(y)
  7007  		return true
  7008  	}
  7009  	// match: (SRLI <t> [x] (MOVBUreg y))
  7010  	// cond: x >= 8
  7011  	// result: (MOVDconst <t> [0])
  7012  	for {
  7013  		t := v.Type
  7014  		x := auxIntToInt64(v.AuxInt)
  7015  		if v_0.Op != OpRISCV64MOVBUreg {
  7016  			break
  7017  		}
  7018  		if !(x >= 8) {
  7019  			break
  7020  		}
  7021  		v.reset(OpRISCV64MOVDconst)
  7022  		v.Type = t
  7023  		v.AuxInt = int64ToAuxInt(0)
  7024  		return true
  7025  	}
  7026  	// match: (SRLI <t> [x] (MOVHUreg y))
  7027  	// cond: x >= 16
  7028  	// result: (MOVDconst <t> [0])
  7029  	for {
  7030  		t := v.Type
  7031  		x := auxIntToInt64(v.AuxInt)
  7032  		if v_0.Op != OpRISCV64MOVHUreg {
  7033  			break
  7034  		}
  7035  		if !(x >= 16) {
  7036  			break
  7037  		}
  7038  		v.reset(OpRISCV64MOVDconst)
  7039  		v.Type = t
  7040  		v.AuxInt = int64ToAuxInt(0)
  7041  		return true
  7042  	}
  7043  	// match: (SRLI <t> [x] (MOVWUreg y))
  7044  	// cond: x >= 32
  7045  	// result: (MOVDconst <t> [0])
  7046  	for {
  7047  		t := v.Type
  7048  		x := auxIntToInt64(v.AuxInt)
  7049  		if v_0.Op != OpRISCV64MOVWUreg {
  7050  			break
  7051  		}
  7052  		if !(x >= 32) {
  7053  			break
  7054  		}
  7055  		v.reset(OpRISCV64MOVDconst)
  7056  		v.Type = t
  7057  		v.AuxInt = int64ToAuxInt(0)
  7058  		return true
  7059  	}
  7060  	// match: (SRLI [x] (MOVDconst [y]))
  7061  	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
  7062  	for {
  7063  		x := auxIntToInt64(v.AuxInt)
  7064  		if v_0.Op != OpRISCV64MOVDconst {
  7065  			break
  7066  		}
  7067  		y := auxIntToInt64(v_0.AuxInt)
  7068  		v.reset(OpRISCV64MOVDconst)
  7069  		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
  7070  		return true
  7071  	}
  7072  	return false
  7073  }
  7074  func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
  7075  	v_1 := v.Args[1]
  7076  	v_0 := v.Args[0]
  7077  	// match: (SRLW x (MOVDconst [val]))
  7078  	// result: (SRLIW [int64(val&31)] x)
  7079  	for {
  7080  		x := v_0
  7081  		if v_1.Op != OpRISCV64MOVDconst {
  7082  			break
  7083  		}
  7084  		val := auxIntToInt64(v_1.AuxInt)
  7085  		v.reset(OpRISCV64SRLIW)
  7086  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  7087  		v.AddArg(x)
  7088  		return true
  7089  	}
  7090  	return false
  7091  }
  7092  func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
  7093  	v_1 := v.Args[1]
  7094  	v_0 := v.Args[0]
  7095  	b := v.Block
  7096  	// match: (SUB x (MOVDconst [val]))
  7097  	// cond: is32Bit(-val)
  7098  	// result: (ADDI [-val] x)
  7099  	for {
  7100  		x := v_0
  7101  		if v_1.Op != OpRISCV64MOVDconst {
  7102  			break
  7103  		}
  7104  		val := auxIntToInt64(v_1.AuxInt)
  7105  		if !(is32Bit(-val)) {
  7106  			break
  7107  		}
  7108  		v.reset(OpRISCV64ADDI)
  7109  		v.AuxInt = int64ToAuxInt(-val)
  7110  		v.AddArg(x)
  7111  		return true
  7112  	}
  7113  	// match: (SUB <t> (MOVDconst [val]) y)
  7114  	// cond: is32Bit(-val)
  7115  	// result: (NEG (ADDI <t> [-val] y))
  7116  	for {
  7117  		t := v.Type
  7118  		if v_0.Op != OpRISCV64MOVDconst {
  7119  			break
  7120  		}
  7121  		val := auxIntToInt64(v_0.AuxInt)
  7122  		y := v_1
  7123  		if !(is32Bit(-val)) {
  7124  			break
  7125  		}
  7126  		v.reset(OpRISCV64NEG)
  7127  		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
  7128  		v0.AuxInt = int64ToAuxInt(-val)
  7129  		v0.AddArg(y)
  7130  		v.AddArg(v0)
  7131  		return true
  7132  	}
  7133  	// match: (SUB x (MOVDconst [0]))
  7134  	// result: x
  7135  	for {
  7136  		x := v_0
  7137  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  7138  			break
  7139  		}
  7140  		v.copyOf(x)
  7141  		return true
  7142  	}
  7143  	// match: (SUB (MOVDconst [0]) x)
  7144  	// result: (NEG x)
  7145  	for {
  7146  		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
  7147  			break
  7148  		}
  7149  		x := v_1
  7150  		v.reset(OpRISCV64NEG)
  7151  		v.AddArg(x)
  7152  		return true
  7153  	}
  7154  	return false
  7155  }
  7156  func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
  7157  	v_1 := v.Args[1]
  7158  	v_0 := v.Args[0]
  7159  	// match: (SUBW x (MOVDconst [0]))
  7160  	// result: (ADDIW [0] x)
  7161  	for {
  7162  		x := v_0
  7163  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  7164  			break
  7165  		}
  7166  		v.reset(OpRISCV64ADDIW)
  7167  		v.AuxInt = int64ToAuxInt(0)
  7168  		v.AddArg(x)
  7169  		return true
  7170  	}
  7171  	// match: (SUBW (MOVDconst [0]) x)
  7172  	// result: (NEGW x)
  7173  	for {
  7174  		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
  7175  			break
  7176  		}
  7177  		x := v_1
  7178  		v.reset(OpRISCV64NEGW)
  7179  		v.AddArg(x)
  7180  		return true
  7181  	}
  7182  	return false
  7183  }
  7184  func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
  7185  	v_1 := v.Args[1]
  7186  	v_0 := v.Args[0]
  7187  	// match: (XOR (MOVDconst [val]) x)
  7188  	// cond: is32Bit(val)
  7189  	// result: (XORI [val] x)
  7190  	for {
  7191  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7192  			if v_0.Op != OpRISCV64MOVDconst {
  7193  				continue
  7194  			}
  7195  			val := auxIntToInt64(v_0.AuxInt)
  7196  			x := v_1
  7197  			if !(is32Bit(val)) {
  7198  				continue
  7199  			}
  7200  			v.reset(OpRISCV64XORI)
  7201  			v.AuxInt = int64ToAuxInt(val)
  7202  			v.AddArg(x)
  7203  			return true
  7204  		}
  7205  		break
  7206  	}
  7207  	return false
  7208  }
  7209  func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
  7210  	v_1 := v.Args[1]
  7211  	v_0 := v.Args[0]
  7212  	b := v.Block
  7213  	typ := &b.Func.Config.Types
  7214  	// match: (RotateLeft16 <t> x y)
  7215  	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
  7216  	for {
  7217  		t := v.Type
  7218  		x := v_0
  7219  		y := v_1
  7220  		v.reset(OpRISCV64OR)
  7221  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  7222  		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7223  		v1.AuxInt = int64ToAuxInt(15)
  7224  		v1.AddArg(y)
  7225  		v0.AddArg2(x, v1)
  7226  		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7227  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7228  		v3.AddArg(x)
  7229  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7230  		v4.AuxInt = int64ToAuxInt(15)
  7231  		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
  7232  		v5.AddArg(y)
  7233  		v4.AddArg(v5)
  7234  		v2.AddArg2(v3, v4)
  7235  		v.AddArg2(v0, v2)
  7236  		return true
  7237  	}
  7238  }
  7239  func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
  7240  	v_1 := v.Args[1]
  7241  	v_0 := v.Args[0]
  7242  	b := v.Block
  7243  	typ := &b.Func.Config.Types
  7244  	// match: (RotateLeft8 <t> x y)
  7245  	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
  7246  	for {
  7247  		t := v.Type
  7248  		x := v_0
  7249  		y := v_1
  7250  		v.reset(OpRISCV64OR)
  7251  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  7252  		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7253  		v1.AuxInt = int64ToAuxInt(7)
  7254  		v1.AddArg(y)
  7255  		v0.AddArg2(x, v1)
  7256  		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7257  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7258  		v3.AddArg(x)
  7259  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7260  		v4.AuxInt = int64ToAuxInt(7)
  7261  		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
  7262  		v5.AddArg(y)
  7263  		v4.AddArg(v5)
  7264  		v2.AddArg2(v3, v4)
  7265  		v.AddArg2(v0, v2)
  7266  		return true
  7267  	}
  7268  }
  7269  func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
  7270  	v_1 := v.Args[1]
  7271  	v_0 := v.Args[0]
  7272  	b := v.Block
  7273  	typ := &b.Func.Config.Types
  7274  	// match: (Rsh16Ux16 <t> x y)
  7275  	// cond: !shiftIsBounded(v)
  7276  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  7277  	for {
  7278  		t := v.Type
  7279  		x := v_0
  7280  		y := v_1
  7281  		if !(!shiftIsBounded(v)) {
  7282  			break
  7283  		}
  7284  		v.reset(OpRISCV64AND)
  7285  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7286  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7287  		v1.AddArg(x)
  7288  		v0.AddArg2(v1, y)
  7289  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7290  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7291  		v3.AuxInt = int64ToAuxInt(64)
  7292  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7293  		v4.AddArg(y)
  7294  		v3.AddArg(v4)
  7295  		v2.AddArg(v3)
  7296  		v.AddArg2(v0, v2)
  7297  		return true
  7298  	}
  7299  	// match: (Rsh16Ux16 x y)
  7300  	// cond: shiftIsBounded(v)
  7301  	// result: (SRL (ZeroExt16to64 x) y)
  7302  	for {
  7303  		x := v_0
  7304  		y := v_1
  7305  		if !(shiftIsBounded(v)) {
  7306  			break
  7307  		}
  7308  		v.reset(OpRISCV64SRL)
  7309  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7310  		v0.AddArg(x)
  7311  		v.AddArg2(v0, y)
  7312  		return true
  7313  	}
  7314  	return false
  7315  }
  7316  func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
  7317  	v_1 := v.Args[1]
  7318  	v_0 := v.Args[0]
  7319  	b := v.Block
  7320  	typ := &b.Func.Config.Types
  7321  	// match: (Rsh16Ux32 <t> x y)
  7322  	// cond: !shiftIsBounded(v)
  7323  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  7324  	for {
  7325  		t := v.Type
  7326  		x := v_0
  7327  		y := v_1
  7328  		if !(!shiftIsBounded(v)) {
  7329  			break
  7330  		}
  7331  		v.reset(OpRISCV64AND)
  7332  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7333  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7334  		v1.AddArg(x)
  7335  		v0.AddArg2(v1, y)
  7336  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7337  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7338  		v3.AuxInt = int64ToAuxInt(64)
  7339  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7340  		v4.AddArg(y)
  7341  		v3.AddArg(v4)
  7342  		v2.AddArg(v3)
  7343  		v.AddArg2(v0, v2)
  7344  		return true
  7345  	}
  7346  	// match: (Rsh16Ux32 x y)
  7347  	// cond: shiftIsBounded(v)
  7348  	// result: (SRL (ZeroExt16to64 x) y)
  7349  	for {
  7350  		x := v_0
  7351  		y := v_1
  7352  		if !(shiftIsBounded(v)) {
  7353  			break
  7354  		}
  7355  		v.reset(OpRISCV64SRL)
  7356  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7357  		v0.AddArg(x)
  7358  		v.AddArg2(v0, y)
  7359  		return true
  7360  	}
  7361  	return false
  7362  }
  7363  func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
  7364  	v_1 := v.Args[1]
  7365  	v_0 := v.Args[0]
  7366  	b := v.Block
  7367  	typ := &b.Func.Config.Types
  7368  	// match: (Rsh16Ux64 <t> x y)
  7369  	// cond: !shiftIsBounded(v)
  7370  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
  7371  	for {
  7372  		t := v.Type
  7373  		x := v_0
  7374  		y := v_1
  7375  		if !(!shiftIsBounded(v)) {
  7376  			break
  7377  		}
  7378  		v.reset(OpRISCV64AND)
  7379  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7380  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7381  		v1.AddArg(x)
  7382  		v0.AddArg2(v1, y)
  7383  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7384  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7385  		v3.AuxInt = int64ToAuxInt(64)
  7386  		v3.AddArg(y)
  7387  		v2.AddArg(v3)
  7388  		v.AddArg2(v0, v2)
  7389  		return true
  7390  	}
  7391  	// match: (Rsh16Ux64 x y)
  7392  	// cond: shiftIsBounded(v)
  7393  	// result: (SRL (ZeroExt16to64 x) y)
  7394  	for {
  7395  		x := v_0
  7396  		y := v_1
  7397  		if !(shiftIsBounded(v)) {
  7398  			break
  7399  		}
  7400  		v.reset(OpRISCV64SRL)
  7401  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7402  		v0.AddArg(x)
  7403  		v.AddArg2(v0, y)
  7404  		return true
  7405  	}
  7406  	return false
  7407  }
  7408  func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
  7409  	v_1 := v.Args[1]
  7410  	v_0 := v.Args[0]
  7411  	b := v.Block
  7412  	typ := &b.Func.Config.Types
  7413  	// match: (Rsh16Ux8 <t> x y)
  7414  	// cond: !shiftIsBounded(v)
  7415  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  7416  	for {
  7417  		t := v.Type
  7418  		x := v_0
  7419  		y := v_1
  7420  		if !(!shiftIsBounded(v)) {
  7421  			break
  7422  		}
  7423  		v.reset(OpRISCV64AND)
  7424  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7425  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7426  		v1.AddArg(x)
  7427  		v0.AddArg2(v1, y)
  7428  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7429  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7430  		v3.AuxInt = int64ToAuxInt(64)
  7431  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7432  		v4.AddArg(y)
  7433  		v3.AddArg(v4)
  7434  		v2.AddArg(v3)
  7435  		v.AddArg2(v0, v2)
  7436  		return true
  7437  	}
  7438  	// match: (Rsh16Ux8 x y)
  7439  	// cond: shiftIsBounded(v)
  7440  	// result: (SRL (ZeroExt16to64 x) y)
  7441  	for {
  7442  		x := v_0
  7443  		y := v_1
  7444  		if !(shiftIsBounded(v)) {
  7445  			break
  7446  		}
  7447  		v.reset(OpRISCV64SRL)
  7448  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7449  		v0.AddArg(x)
  7450  		v.AddArg2(v0, y)
  7451  		return true
  7452  	}
  7453  	return false
  7454  }
  7455  func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
  7456  	v_1 := v.Args[1]
  7457  	v_0 := v.Args[0]
  7458  	b := v.Block
  7459  	typ := &b.Func.Config.Types
  7460  	// match: (Rsh16x16 <t> x y)
  7461  	// cond: !shiftIsBounded(v)
  7462  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
  7463  	for {
  7464  		t := v.Type
  7465  		x := v_0
  7466  		y := v_1
  7467  		if !(!shiftIsBounded(v)) {
  7468  			break
  7469  		}
  7470  		v.reset(OpRISCV64SRA)
  7471  		v.Type = t
  7472  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7473  		v0.AddArg(x)
  7474  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7475  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7476  		v2.AuxInt = int64ToAuxInt(-1)
  7477  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7478  		v3.AuxInt = int64ToAuxInt(64)
  7479  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7480  		v4.AddArg(y)
  7481  		v3.AddArg(v4)
  7482  		v2.AddArg(v3)
  7483  		v1.AddArg2(y, v2)
  7484  		v.AddArg2(v0, v1)
  7485  		return true
  7486  	}
  7487  	// match: (Rsh16x16 x y)
  7488  	// cond: shiftIsBounded(v)
  7489  	// result: (SRA (SignExt16to64 x) y)
  7490  	for {
  7491  		x := v_0
  7492  		y := v_1
  7493  		if !(shiftIsBounded(v)) {
  7494  			break
  7495  		}
  7496  		v.reset(OpRISCV64SRA)
  7497  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7498  		v0.AddArg(x)
  7499  		v.AddArg2(v0, y)
  7500  		return true
  7501  	}
  7502  	return false
  7503  }
  7504  func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
  7505  	v_1 := v.Args[1]
  7506  	v_0 := v.Args[0]
  7507  	b := v.Block
  7508  	typ := &b.Func.Config.Types
  7509  	// match: (Rsh16x32 <t> x y)
  7510  	// cond: !shiftIsBounded(v)
  7511  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
  7512  	for {
  7513  		t := v.Type
  7514  		x := v_0
  7515  		y := v_1
  7516  		if !(!shiftIsBounded(v)) {
  7517  			break
  7518  		}
  7519  		v.reset(OpRISCV64SRA)
  7520  		v.Type = t
  7521  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7522  		v0.AddArg(x)
  7523  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7524  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7525  		v2.AuxInt = int64ToAuxInt(-1)
  7526  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7527  		v3.AuxInt = int64ToAuxInt(64)
  7528  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7529  		v4.AddArg(y)
  7530  		v3.AddArg(v4)
  7531  		v2.AddArg(v3)
  7532  		v1.AddArg2(y, v2)
  7533  		v.AddArg2(v0, v1)
  7534  		return true
  7535  	}
  7536  	// match: (Rsh16x32 x y)
  7537  	// cond: shiftIsBounded(v)
  7538  	// result: (SRA (SignExt16to64 x) y)
  7539  	for {
  7540  		x := v_0
  7541  		y := v_1
  7542  		if !(shiftIsBounded(v)) {
  7543  			break
  7544  		}
  7545  		v.reset(OpRISCV64SRA)
  7546  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7547  		v0.AddArg(x)
  7548  		v.AddArg2(v0, y)
  7549  		return true
  7550  	}
  7551  	return false
  7552  }
  7553  func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
  7554  	v_1 := v.Args[1]
  7555  	v_0 := v.Args[0]
  7556  	b := v.Block
  7557  	typ := &b.Func.Config.Types
  7558  	// match: (Rsh16x64 <t> x y)
  7559  	// cond: !shiftIsBounded(v)
  7560  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
  7561  	for {
  7562  		t := v.Type
  7563  		x := v_0
  7564  		y := v_1
  7565  		if !(!shiftIsBounded(v)) {
  7566  			break
  7567  		}
  7568  		v.reset(OpRISCV64SRA)
  7569  		v.Type = t
  7570  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7571  		v0.AddArg(x)
  7572  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7573  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7574  		v2.AuxInt = int64ToAuxInt(-1)
  7575  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7576  		v3.AuxInt = int64ToAuxInt(64)
  7577  		v3.AddArg(y)
  7578  		v2.AddArg(v3)
  7579  		v1.AddArg2(y, v2)
  7580  		v.AddArg2(v0, v1)
  7581  		return true
  7582  	}
  7583  	// match: (Rsh16x64 x y)
  7584  	// cond: shiftIsBounded(v)
  7585  	// result: (SRA (SignExt16to64 x) y)
  7586  	for {
  7587  		x := v_0
  7588  		y := v_1
  7589  		if !(shiftIsBounded(v)) {
  7590  			break
  7591  		}
  7592  		v.reset(OpRISCV64SRA)
  7593  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7594  		v0.AddArg(x)
  7595  		v.AddArg2(v0, y)
  7596  		return true
  7597  	}
  7598  	return false
  7599  }
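// Note on the unbounded Rsh*x* lowerings: a signed shift by 64 or more must
// produce all sign bits, so instead of masking the result these rules saturate
// the shift amount. ADDI [-1] of SLTIU [64] y is 0 when y < 64 and -1
// otherwise; OR'ing that into y leaves an in-range amount unchanged but turns
// an oversized one into all ones, which SRA truncates to 63, i.e. a pure sign
// fill. A rough sketch (hypothetical helper, not part of this package):
//
//	func rsh16(x int16, y uint64) int16 {
//		amt := y
//		if y >= 64 {
//			amt = ^uint64(0) // OR y (ADDI [-1] (SLTIU [64] y))
//		}
//		return int16(int64(x) >> (amt & 63)) // what SRA computes
//	}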
  7600  func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
  7601  	v_1 := v.Args[1]
  7602  	v_0 := v.Args[0]
  7603  	b := v.Block
  7604  	typ := &b.Func.Config.Types
  7605  	// match: (Rsh16x8 <t> x y)
  7606  	// cond: !shiftIsBounded(v)
  7607  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
  7608  	for {
  7609  		t := v.Type
  7610  		x := v_0
  7611  		y := v_1
  7612  		if !(!shiftIsBounded(v)) {
  7613  			break
  7614  		}
  7615  		v.reset(OpRISCV64SRA)
  7616  		v.Type = t
  7617  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7618  		v0.AddArg(x)
  7619  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7620  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7621  		v2.AuxInt = int64ToAuxInt(-1)
  7622  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7623  		v3.AuxInt = int64ToAuxInt(64)
  7624  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7625  		v4.AddArg(y)
  7626  		v3.AddArg(v4)
  7627  		v2.AddArg(v3)
  7628  		v1.AddArg2(y, v2)
  7629  		v.AddArg2(v0, v1)
  7630  		return true
  7631  	}
  7632  	// match: (Rsh16x8 x y)
  7633  	// cond: shiftIsBounded(v)
  7634  	// result: (SRA (SignExt16to64 x) y)
  7635  	for {
  7636  		x := v_0
  7637  		y := v_1
  7638  		if !(shiftIsBounded(v)) {
  7639  			break
  7640  		}
  7641  		v.reset(OpRISCV64SRA)
  7642  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7643  		v0.AddArg(x)
  7644  		v.AddArg2(v0, y)
  7645  		return true
  7646  	}
  7647  	return false
  7648  }
  7649  func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
  7650  	v_1 := v.Args[1]
  7651  	v_0 := v.Args[0]
  7652  	b := v.Block
  7653  	typ := &b.Func.Config.Types
  7654  	// match: (Rsh32Ux16 <t> x y)
  7655  	// cond: !shiftIsBounded(v)
  7656  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
  7657  	for {
  7658  		t := v.Type
  7659  		x := v_0
  7660  		y := v_1
  7661  		if !(!shiftIsBounded(v)) {
  7662  			break
  7663  		}
  7664  		v.reset(OpRISCV64AND)
  7665  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  7666  		v0.AddArg2(x, y)
  7667  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  7668  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7669  		v2.AuxInt = int64ToAuxInt(32)
  7670  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7671  		v3.AddArg(y)
  7672  		v2.AddArg(v3)
  7673  		v1.AddArg(v2)
  7674  		v.AddArg2(v0, v1)
  7675  		return true
  7676  	}
  7677  	// match: (Rsh32Ux16 x y)
  7678  	// cond: shiftIsBounded(v)
  7679  	// result: (SRLW x y)
  7680  	for {
  7681  		x := v_0
  7682  		y := v_1
  7683  		if !(shiftIsBounded(v)) {
  7684  			break
  7685  		}
  7686  		v.reset(OpRISCV64SRLW)
  7687  		v.AddArg2(x, y)
  7688  		return true
  7689  	}
  7690  	return false
  7691  }
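// Note on the Rsh32* lowerings: SRLW and SRAW operate on the low 32 bits of
// the source and use only the low 5 bits of the shift amount, so no explicit
// zero or sign extension of x is needed here and the out-of-range check
// compares the amount against 32 rather than 64.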
  7692  func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
  7693  	v_1 := v.Args[1]
  7694  	v_0 := v.Args[0]
  7695  	b := v.Block
  7696  	typ := &b.Func.Config.Types
  7697  	// match: (Rsh32Ux32 <t> x y)
  7698  	// cond: !shiftIsBounded(v)
  7699  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
  7700  	for {
  7701  		t := v.Type
  7702  		x := v_0
  7703  		y := v_1
  7704  		if !(!shiftIsBounded(v)) {
  7705  			break
  7706  		}
  7707  		v.reset(OpRISCV64AND)
  7708  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  7709  		v0.AddArg2(x, y)
  7710  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  7711  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7712  		v2.AuxInt = int64ToAuxInt(32)
  7713  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7714  		v3.AddArg(y)
  7715  		v2.AddArg(v3)
  7716  		v1.AddArg(v2)
  7717  		v.AddArg2(v0, v1)
  7718  		return true
  7719  	}
  7720  	// match: (Rsh32Ux32 x y)
  7721  	// cond: shiftIsBounded(v)
  7722  	// result: (SRLW x y)
  7723  	for {
  7724  		x := v_0
  7725  		y := v_1
  7726  		if !(shiftIsBounded(v)) {
  7727  			break
  7728  		}
  7729  		v.reset(OpRISCV64SRLW)
  7730  		v.AddArg2(x, y)
  7731  		return true
  7732  	}
  7733  	return false
  7734  }
  7735  func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
  7736  	v_1 := v.Args[1]
  7737  	v_0 := v.Args[0]
  7738  	b := v.Block
  7739  	// match: (Rsh32Ux64 <t> x y)
  7740  	// cond: !shiftIsBounded(v)
  7741  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
  7742  	for {
  7743  		t := v.Type
  7744  		x := v_0
  7745  		y := v_1
  7746  		if !(!shiftIsBounded(v)) {
  7747  			break
  7748  		}
  7749  		v.reset(OpRISCV64AND)
  7750  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  7751  		v0.AddArg2(x, y)
  7752  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  7753  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7754  		v2.AuxInt = int64ToAuxInt(32)
  7755  		v2.AddArg(y)
  7756  		v1.AddArg(v2)
  7757  		v.AddArg2(v0, v1)
  7758  		return true
  7759  	}
  7760  	// match: (Rsh32Ux64 x y)
  7761  	// cond: shiftIsBounded(v)
  7762  	// result: (SRLW x y)
  7763  	for {
  7764  		x := v_0
  7765  		y := v_1
  7766  		if !(shiftIsBounded(v)) {
  7767  			break
  7768  		}
  7769  		v.reset(OpRISCV64SRLW)
  7770  		v.AddArg2(x, y)
  7771  		return true
  7772  	}
  7773  	return false
  7774  }
  7775  func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
  7776  	v_1 := v.Args[1]
  7777  	v_0 := v.Args[0]
  7778  	b := v.Block
  7779  	typ := &b.Func.Config.Types
  7780  	// match: (Rsh32Ux8 <t> x y)
  7781  	// cond: !shiftIsBounded(v)
  7782  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
  7783  	for {
  7784  		t := v.Type
  7785  		x := v_0
  7786  		y := v_1
  7787  		if !(!shiftIsBounded(v)) {
  7788  			break
  7789  		}
  7790  		v.reset(OpRISCV64AND)
  7791  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  7792  		v0.AddArg2(x, y)
  7793  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  7794  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7795  		v2.AuxInt = int64ToAuxInt(32)
  7796  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7797  		v3.AddArg(y)
  7798  		v2.AddArg(v3)
  7799  		v1.AddArg(v2)
  7800  		v.AddArg2(v0, v1)
  7801  		return true
  7802  	}
  7803  	// match: (Rsh32Ux8 x y)
  7804  	// cond: shiftIsBounded(v)
  7805  	// result: (SRLW x y)
  7806  	for {
  7807  		x := v_0
  7808  		y := v_1
  7809  		if !(shiftIsBounded(v)) {
  7810  			break
  7811  		}
  7812  		v.reset(OpRISCV64SRLW)
  7813  		v.AddArg2(x, y)
  7814  		return true
  7815  	}
  7816  	return false
  7817  }
  7818  func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
  7819  	v_1 := v.Args[1]
  7820  	v_0 := v.Args[0]
  7821  	b := v.Block
  7822  	typ := &b.Func.Config.Types
  7823  	// match: (Rsh32x16 <t> x y)
  7824  	// cond: !shiftIsBounded(v)
  7825  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
  7826  	for {
  7827  		t := v.Type
  7828  		x := v_0
  7829  		y := v_1
  7830  		if !(!shiftIsBounded(v)) {
  7831  			break
  7832  		}
  7833  		v.reset(OpRISCV64SRAW)
  7834  		v.Type = t
  7835  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7836  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7837  		v1.AuxInt = int64ToAuxInt(-1)
  7838  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7839  		v2.AuxInt = int64ToAuxInt(32)
  7840  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7841  		v3.AddArg(y)
  7842  		v2.AddArg(v3)
  7843  		v1.AddArg(v2)
  7844  		v0.AddArg2(y, v1)
  7845  		v.AddArg2(x, v0)
  7846  		return true
  7847  	}
  7848  	// match: (Rsh32x16 x y)
  7849  	// cond: shiftIsBounded(v)
  7850  	// result: (SRAW x y)
  7851  	for {
  7852  		x := v_0
  7853  		y := v_1
  7854  		if !(shiftIsBounded(v)) {
  7855  			break
  7856  		}
  7857  		v.reset(OpRISCV64SRAW)
  7858  		v.AddArg2(x, y)
  7859  		return true
  7860  	}
  7861  	return false
  7862  }
  7863  func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
  7864  	v_1 := v.Args[1]
  7865  	v_0 := v.Args[0]
  7866  	b := v.Block
  7867  	typ := &b.Func.Config.Types
  7868  	// match: (Rsh32x32 <t> x y)
  7869  	// cond: !shiftIsBounded(v)
  7870  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
  7871  	for {
  7872  		t := v.Type
  7873  		x := v_0
  7874  		y := v_1
  7875  		if !(!shiftIsBounded(v)) {
  7876  			break
  7877  		}
  7878  		v.reset(OpRISCV64SRAW)
  7879  		v.Type = t
  7880  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7881  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7882  		v1.AuxInt = int64ToAuxInt(-1)
  7883  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7884  		v2.AuxInt = int64ToAuxInt(32)
  7885  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7886  		v3.AddArg(y)
  7887  		v2.AddArg(v3)
  7888  		v1.AddArg(v2)
  7889  		v0.AddArg2(y, v1)
  7890  		v.AddArg2(x, v0)
  7891  		return true
  7892  	}
  7893  	// match: (Rsh32x32 x y)
  7894  	// cond: shiftIsBounded(v)
  7895  	// result: (SRAW x y)
  7896  	for {
  7897  		x := v_0
  7898  		y := v_1
  7899  		if !(shiftIsBounded(v)) {
  7900  			break
  7901  		}
  7902  		v.reset(OpRISCV64SRAW)
  7903  		v.AddArg2(x, y)
  7904  		return true
  7905  	}
  7906  	return false
  7907  }
  7908  func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
  7909  	v_1 := v.Args[1]
  7910  	v_0 := v.Args[0]
  7911  	b := v.Block
  7912  	// match: (Rsh32x64 <t> x y)
  7913  	// cond: !shiftIsBounded(v)
  7914  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
  7915  	for {
  7916  		t := v.Type
  7917  		x := v_0
  7918  		y := v_1
  7919  		if !(!shiftIsBounded(v)) {
  7920  			break
  7921  		}
  7922  		v.reset(OpRISCV64SRAW)
  7923  		v.Type = t
  7924  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7925  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7926  		v1.AuxInt = int64ToAuxInt(-1)
  7927  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7928  		v2.AuxInt = int64ToAuxInt(32)
  7929  		v2.AddArg(y)
  7930  		v1.AddArg(v2)
  7931  		v0.AddArg2(y, v1)
  7932  		v.AddArg2(x, v0)
  7933  		return true
  7934  	}
  7935  	// match: (Rsh32x64 x y)
  7936  	// cond: shiftIsBounded(v)
  7937  	// result: (SRAW x y)
  7938  	for {
  7939  		x := v_0
  7940  		y := v_1
  7941  		if !(shiftIsBounded(v)) {
  7942  			break
  7943  		}
  7944  		v.reset(OpRISCV64SRAW)
  7945  		v.AddArg2(x, y)
  7946  		return true
  7947  	}
  7948  	return false
  7949  }
  7950  func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
  7951  	v_1 := v.Args[1]
  7952  	v_0 := v.Args[0]
  7953  	b := v.Block
  7954  	typ := &b.Func.Config.Types
  7955  	// match: (Rsh32x8 <t> x y)
  7956  	// cond: !shiftIsBounded(v)
  7957  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
  7958  	for {
  7959  		t := v.Type
  7960  		x := v_0
  7961  		y := v_1
  7962  		if !(!shiftIsBounded(v)) {
  7963  			break
  7964  		}
  7965  		v.reset(OpRISCV64SRAW)
  7966  		v.Type = t
  7967  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7968  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7969  		v1.AuxInt = int64ToAuxInt(-1)
  7970  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7971  		v2.AuxInt = int64ToAuxInt(32)
  7972  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7973  		v3.AddArg(y)
  7974  		v2.AddArg(v3)
  7975  		v1.AddArg(v2)
  7976  		v0.AddArg2(y, v1)
  7977  		v.AddArg2(x, v0)
  7978  		return true
  7979  	}
  7980  	// match: (Rsh32x8 x y)
  7981  	// cond: shiftIsBounded(v)
  7982  	// result: (SRAW x y)
  7983  	for {
  7984  		x := v_0
  7985  		y := v_1
  7986  		if !(shiftIsBounded(v)) {
  7987  			break
  7988  		}
  7989  		v.reset(OpRISCV64SRAW)
  7990  		v.AddArg2(x, y)
  7991  		return true
  7992  	}
  7993  	return false
  7994  }
  7995  func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
  7996  	v_1 := v.Args[1]
  7997  	v_0 := v.Args[0]
  7998  	b := v.Block
  7999  	typ := &b.Func.Config.Types
  8000  	// match: (Rsh64Ux16 <t> x y)
  8001  	// cond: !shiftIsBounded(v)
  8002  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  8003  	for {
  8004  		t := v.Type
  8005  		x := v_0
  8006  		y := v_1
  8007  		if !(!shiftIsBounded(v)) {
  8008  			break
  8009  		}
  8010  		v.reset(OpRISCV64AND)
  8011  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8012  		v0.AddArg2(x, y)
  8013  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8014  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8015  		v2.AuxInt = int64ToAuxInt(64)
  8016  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8017  		v3.AddArg(y)
  8018  		v2.AddArg(v3)
  8019  		v1.AddArg(v2)
  8020  		v.AddArg2(v0, v1)
  8021  		return true
  8022  	}
  8023  	// match: (Rsh64Ux16 x y)
  8024  	// cond: shiftIsBounded(v)
  8025  	// result: (SRL x y)
  8026  	for {
  8027  		x := v_0
  8028  		y := v_1
  8029  		if !(shiftIsBounded(v)) {
  8030  			break
  8031  		}
  8032  		v.reset(OpRISCV64SRL)
  8033  		v.AddArg2(x, y)
  8034  		return true
  8035  	}
  8036  	return false
  8037  }
  8038  func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
  8039  	v_1 := v.Args[1]
  8040  	v_0 := v.Args[0]
  8041  	b := v.Block
  8042  	typ := &b.Func.Config.Types
  8043  	// match: (Rsh64Ux32 <t> x y)
  8044  	// cond: !shiftIsBounded(v)
  8045  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  8046  	for {
  8047  		t := v.Type
  8048  		x := v_0
  8049  		y := v_1
  8050  		if !(!shiftIsBounded(v)) {
  8051  			break
  8052  		}
  8053  		v.reset(OpRISCV64AND)
  8054  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8055  		v0.AddArg2(x, y)
  8056  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8057  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8058  		v2.AuxInt = int64ToAuxInt(64)
  8059  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8060  		v3.AddArg(y)
  8061  		v2.AddArg(v3)
  8062  		v1.AddArg(v2)
  8063  		v.AddArg2(v0, v1)
  8064  		return true
  8065  	}
  8066  	// match: (Rsh64Ux32 x y)
  8067  	// cond: shiftIsBounded(v)
  8068  	// result: (SRL x y)
  8069  	for {
  8070  		x := v_0
  8071  		y := v_1
  8072  		if !(shiftIsBounded(v)) {
  8073  			break
  8074  		}
  8075  		v.reset(OpRISCV64SRL)
  8076  		v.AddArg2(x, y)
  8077  		return true
  8078  	}
  8079  	return false
  8080  }
  8081  func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
  8082  	v_1 := v.Args[1]
  8083  	v_0 := v.Args[0]
  8084  	b := v.Block
  8085  	// match: (Rsh64Ux64 <t> x y)
  8086  	// cond: !shiftIsBounded(v)
  8087  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
  8088  	for {
  8089  		t := v.Type
  8090  		x := v_0
  8091  		y := v_1
  8092  		if !(!shiftIsBounded(v)) {
  8093  			break
  8094  		}
  8095  		v.reset(OpRISCV64AND)
  8096  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8097  		v0.AddArg2(x, y)
  8098  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8099  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8100  		v2.AuxInt = int64ToAuxInt(64)
  8101  		v2.AddArg(y)
  8102  		v1.AddArg(v2)
  8103  		v.AddArg2(v0, v1)
  8104  		return true
  8105  	}
  8106  	// match: (Rsh64Ux64 x y)
  8107  	// cond: shiftIsBounded(v)
  8108  	// result: (SRL x y)
  8109  	for {
  8110  		x := v_0
  8111  		y := v_1
  8112  		if !(shiftIsBounded(v)) {
  8113  			break
  8114  		}
  8115  		v.reset(OpRISCV64SRL)
  8116  		v.AddArg2(x, y)
  8117  		return true
  8118  	}
  8119  	return false
  8120  }
  8121  func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
  8122  	v_1 := v.Args[1]
  8123  	v_0 := v.Args[0]
  8124  	b := v.Block
  8125  	typ := &b.Func.Config.Types
  8126  	// match: (Rsh64Ux8 <t> x y)
  8127  	// cond: !shiftIsBounded(v)
  8128  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  8129  	for {
  8130  		t := v.Type
  8131  		x := v_0
  8132  		y := v_1
  8133  		if !(!shiftIsBounded(v)) {
  8134  			break
  8135  		}
  8136  		v.reset(OpRISCV64AND)
  8137  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8138  		v0.AddArg2(x, y)
  8139  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8140  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8141  		v2.AuxInt = int64ToAuxInt(64)
  8142  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8143  		v3.AddArg(y)
  8144  		v2.AddArg(v3)
  8145  		v1.AddArg(v2)
  8146  		v.AddArg2(v0, v1)
  8147  		return true
  8148  	}
  8149  	// match: (Rsh64Ux8 x y)
  8150  	// cond: shiftIsBounded(v)
  8151  	// result: (SRL x y)
  8152  	for {
  8153  		x := v_0
  8154  		y := v_1
  8155  		if !(shiftIsBounded(v)) {
  8156  			break
  8157  		}
  8158  		v.reset(OpRISCV64SRL)
  8159  		v.AddArg2(x, y)
  8160  		return true
  8161  	}
  8162  	return false
  8163  }
  8164  func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
  8165  	v_1 := v.Args[1]
  8166  	v_0 := v.Args[0]
  8167  	b := v.Block
  8168  	typ := &b.Func.Config.Types
  8169  	// match: (Rsh64x16 <t> x y)
  8170  	// cond: !shiftIsBounded(v)
  8171  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
  8172  	for {
  8173  		t := v.Type
  8174  		x := v_0
  8175  		y := v_1
  8176  		if !(!shiftIsBounded(v)) {
  8177  			break
  8178  		}
  8179  		v.reset(OpRISCV64SRA)
  8180  		v.Type = t
  8181  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8182  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8183  		v1.AuxInt = int64ToAuxInt(-1)
  8184  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8185  		v2.AuxInt = int64ToAuxInt(64)
  8186  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8187  		v3.AddArg(y)
  8188  		v2.AddArg(v3)
  8189  		v1.AddArg(v2)
  8190  		v0.AddArg2(y, v1)
  8191  		v.AddArg2(x, v0)
  8192  		return true
  8193  	}
  8194  	// match: (Rsh64x16 x y)
  8195  	// cond: shiftIsBounded(v)
  8196  	// result: (SRA x y)
  8197  	for {
  8198  		x := v_0
  8199  		y := v_1
  8200  		if !(shiftIsBounded(v)) {
  8201  			break
  8202  		}
  8203  		v.reset(OpRISCV64SRA)
  8204  		v.AddArg2(x, y)
  8205  		return true
  8206  	}
  8207  	return false
  8208  }
  8209  func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
  8210  	v_1 := v.Args[1]
  8211  	v_0 := v.Args[0]
  8212  	b := v.Block
  8213  	typ := &b.Func.Config.Types
  8214  	// match: (Rsh64x32 <t> x y)
  8215  	// cond: !shiftIsBounded(v)
  8216  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
  8217  	for {
  8218  		t := v.Type
  8219  		x := v_0
  8220  		y := v_1
  8221  		if !(!shiftIsBounded(v)) {
  8222  			break
  8223  		}
  8224  		v.reset(OpRISCV64SRA)
  8225  		v.Type = t
  8226  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8227  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8228  		v1.AuxInt = int64ToAuxInt(-1)
  8229  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8230  		v2.AuxInt = int64ToAuxInt(64)
  8231  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8232  		v3.AddArg(y)
  8233  		v2.AddArg(v3)
  8234  		v1.AddArg(v2)
  8235  		v0.AddArg2(y, v1)
  8236  		v.AddArg2(x, v0)
  8237  		return true
  8238  	}
  8239  	// match: (Rsh64x32 x y)
  8240  	// cond: shiftIsBounded(v)
  8241  	// result: (SRA x y)
  8242  	for {
  8243  		x := v_0
  8244  		y := v_1
  8245  		if !(shiftIsBounded(v)) {
  8246  			break
  8247  		}
  8248  		v.reset(OpRISCV64SRA)
  8249  		v.AddArg2(x, y)
  8250  		return true
  8251  	}
  8252  	return false
  8253  }
  8254  func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
  8255  	v_1 := v.Args[1]
  8256  	v_0 := v.Args[0]
  8257  	b := v.Block
  8258  	// match: (Rsh64x64 <t> x y)
  8259  	// cond: !shiftIsBounded(v)
  8260  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
  8261  	for {
  8262  		t := v.Type
  8263  		x := v_0
  8264  		y := v_1
  8265  		if !(!shiftIsBounded(v)) {
  8266  			break
  8267  		}
  8268  		v.reset(OpRISCV64SRA)
  8269  		v.Type = t
  8270  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8271  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8272  		v1.AuxInt = int64ToAuxInt(-1)
  8273  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8274  		v2.AuxInt = int64ToAuxInt(64)
  8275  		v2.AddArg(y)
  8276  		v1.AddArg(v2)
  8277  		v0.AddArg2(y, v1)
  8278  		v.AddArg2(x, v0)
  8279  		return true
  8280  	}
  8281  	// match: (Rsh64x64 x y)
  8282  	// cond: shiftIsBounded(v)
  8283  	// result: (SRA x y)
  8284  	for {
  8285  		x := v_0
  8286  		y := v_1
  8287  		if !(shiftIsBounded(v)) {
  8288  			break
  8289  		}
  8290  		v.reset(OpRISCV64SRA)
  8291  		v.AddArg2(x, y)
  8292  		return true
  8293  	}
  8294  	return false
  8295  }
  8296  func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
  8297  	v_1 := v.Args[1]
  8298  	v_0 := v.Args[0]
  8299  	b := v.Block
  8300  	typ := &b.Func.Config.Types
  8301  	// match: (Rsh64x8 <t> x y)
  8302  	// cond: !shiftIsBounded(v)
  8303  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
  8304  	for {
  8305  		t := v.Type
  8306  		x := v_0
  8307  		y := v_1
  8308  		if !(!shiftIsBounded(v)) {
  8309  			break
  8310  		}
  8311  		v.reset(OpRISCV64SRA)
  8312  		v.Type = t
  8313  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8314  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8315  		v1.AuxInt = int64ToAuxInt(-1)
  8316  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8317  		v2.AuxInt = int64ToAuxInt(64)
  8318  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8319  		v3.AddArg(y)
  8320  		v2.AddArg(v3)
  8321  		v1.AddArg(v2)
  8322  		v0.AddArg2(y, v1)
  8323  		v.AddArg2(x, v0)
  8324  		return true
  8325  	}
  8326  	// match: (Rsh64x8 x y)
  8327  	// cond: shiftIsBounded(v)
  8328  	// result: (SRA x y)
  8329  	for {
  8330  		x := v_0
  8331  		y := v_1
  8332  		if !(shiftIsBounded(v)) {
  8333  			break
  8334  		}
  8335  		v.reset(OpRISCV64SRA)
  8336  		v.AddArg2(x, y)
  8337  		return true
  8338  	}
  8339  	return false
  8340  }
  8341  func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
  8342  	v_1 := v.Args[1]
  8343  	v_0 := v.Args[0]
  8344  	b := v.Block
  8345  	typ := &b.Func.Config.Types
  8346  	// match: (Rsh8Ux16 <t> x y)
  8347  	// cond: !shiftIsBounded(v)
  8348  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  8349  	for {
  8350  		t := v.Type
  8351  		x := v_0
  8352  		y := v_1
  8353  		if !(!shiftIsBounded(v)) {
  8354  			break
  8355  		}
  8356  		v.reset(OpRISCV64AND)
  8357  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8358  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8359  		v1.AddArg(x)
  8360  		v0.AddArg2(v1, y)
  8361  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8362  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8363  		v3.AuxInt = int64ToAuxInt(64)
  8364  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8365  		v4.AddArg(y)
  8366  		v3.AddArg(v4)
  8367  		v2.AddArg(v3)
  8368  		v.AddArg2(v0, v2)
  8369  		return true
  8370  	}
  8371  	// match: (Rsh8Ux16 x y)
  8372  	// cond: shiftIsBounded(v)
  8373  	// result: (SRL (ZeroExt8to64 x) y)
  8374  	for {
  8375  		x := v_0
  8376  		y := v_1
  8377  		if !(shiftIsBounded(v)) {
  8378  			break
  8379  		}
  8380  		v.reset(OpRISCV64SRL)
  8381  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8382  		v0.AddArg(x)
  8383  		v.AddArg2(v0, y)
  8384  		return true
  8385  	}
  8386  	return false
  8387  }
  8388  func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
  8389  	v_1 := v.Args[1]
  8390  	v_0 := v.Args[0]
  8391  	b := v.Block
  8392  	typ := &b.Func.Config.Types
  8393  	// match: (Rsh8Ux32 <t> x y)
  8394  	// cond: !shiftIsBounded(v)
  8395  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  8396  	for {
  8397  		t := v.Type
  8398  		x := v_0
  8399  		y := v_1
  8400  		if !(!shiftIsBounded(v)) {
  8401  			break
  8402  		}
  8403  		v.reset(OpRISCV64AND)
  8404  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8405  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8406  		v1.AddArg(x)
  8407  		v0.AddArg2(v1, y)
  8408  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8409  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8410  		v3.AuxInt = int64ToAuxInt(64)
  8411  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8412  		v4.AddArg(y)
  8413  		v3.AddArg(v4)
  8414  		v2.AddArg(v3)
  8415  		v.AddArg2(v0, v2)
  8416  		return true
  8417  	}
  8418  	// match: (Rsh8Ux32 x y)
  8419  	// cond: shiftIsBounded(v)
  8420  	// result: (SRL (ZeroExt8to64 x) y)
  8421  	for {
  8422  		x := v_0
  8423  		y := v_1
  8424  		if !(shiftIsBounded(v)) {
  8425  			break
  8426  		}
  8427  		v.reset(OpRISCV64SRL)
  8428  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8429  		v0.AddArg(x)
  8430  		v.AddArg2(v0, y)
  8431  		return true
  8432  	}
  8433  	return false
  8434  }
  8435  func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
  8436  	v_1 := v.Args[1]
  8437  	v_0 := v.Args[0]
  8438  	b := v.Block
  8439  	typ := &b.Func.Config.Types
  8440  	// match: (Rsh8Ux64 <t> x y)
  8441  	// cond: !shiftIsBounded(v)
  8442  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
  8443  	for {
  8444  		t := v.Type
  8445  		x := v_0
  8446  		y := v_1
  8447  		if !(!shiftIsBounded(v)) {
  8448  			break
  8449  		}
  8450  		v.reset(OpRISCV64AND)
  8451  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8452  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8453  		v1.AddArg(x)
  8454  		v0.AddArg2(v1, y)
  8455  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8456  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8457  		v3.AuxInt = int64ToAuxInt(64)
  8458  		v3.AddArg(y)
  8459  		v2.AddArg(v3)
  8460  		v.AddArg2(v0, v2)
  8461  		return true
  8462  	}
  8463  	// match: (Rsh8Ux64 x y)
  8464  	// cond: shiftIsBounded(v)
  8465  	// result: (SRL (ZeroExt8to64 x) y)
  8466  	for {
  8467  		x := v_0
  8468  		y := v_1
  8469  		if !(shiftIsBounded(v)) {
  8470  			break
  8471  		}
  8472  		v.reset(OpRISCV64SRL)
  8473  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8474  		v0.AddArg(x)
  8475  		v.AddArg2(v0, y)
  8476  		return true
  8477  	}
  8478  	return false
  8479  }
  8480  func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
  8481  	v_1 := v.Args[1]
  8482  	v_0 := v.Args[0]
  8483  	b := v.Block
  8484  	typ := &b.Func.Config.Types
  8485  	// match: (Rsh8Ux8 <t> x y)
  8486  	// cond: !shiftIsBounded(v)
  8487  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  8488  	for {
  8489  		t := v.Type
  8490  		x := v_0
  8491  		y := v_1
  8492  		if !(!shiftIsBounded(v)) {
  8493  			break
  8494  		}
  8495  		v.reset(OpRISCV64AND)
  8496  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8497  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8498  		v1.AddArg(x)
  8499  		v0.AddArg2(v1, y)
  8500  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8501  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8502  		v3.AuxInt = int64ToAuxInt(64)
  8503  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8504  		v4.AddArg(y)
  8505  		v3.AddArg(v4)
  8506  		v2.AddArg(v3)
  8507  		v.AddArg2(v0, v2)
  8508  		return true
  8509  	}
  8510  	// match: (Rsh8Ux8 x y)
  8511  	// cond: shiftIsBounded(v)
  8512  	// result: (SRL (ZeroExt8to64 x) y)
  8513  	for {
  8514  		x := v_0
  8515  		y := v_1
  8516  		if !(shiftIsBounded(v)) {
  8517  			break
  8518  		}
  8519  		v.reset(OpRISCV64SRL)
  8520  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8521  		v0.AddArg(x)
  8522  		v.AddArg2(v0, y)
  8523  		return true
  8524  	}
  8525  	return false
  8526  }
  8527  func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
  8528  	v_1 := v.Args[1]
  8529  	v_0 := v.Args[0]
  8530  	b := v.Block
  8531  	typ := &b.Func.Config.Types
  8532  	// match: (Rsh8x16 <t> x y)
  8533  	// cond: !shiftIsBounded(v)
  8534  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
  8535  	for {
  8536  		t := v.Type
  8537  		x := v_0
  8538  		y := v_1
  8539  		if !(!shiftIsBounded(v)) {
  8540  			break
  8541  		}
  8542  		v.reset(OpRISCV64SRA)
  8543  		v.Type = t
  8544  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8545  		v0.AddArg(x)
  8546  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8547  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8548  		v2.AuxInt = int64ToAuxInt(-1)
  8549  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8550  		v3.AuxInt = int64ToAuxInt(64)
  8551  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8552  		v4.AddArg(y)
  8553  		v3.AddArg(v4)
  8554  		v2.AddArg(v3)
  8555  		v1.AddArg2(y, v2)
  8556  		v.AddArg2(v0, v1)
  8557  		return true
  8558  	}
  8559  	// match: (Rsh8x16 x y)
  8560  	// cond: shiftIsBounded(v)
  8561  	// result: (SRA (SignExt8to64 x) y)
  8562  	for {
  8563  		x := v_0
  8564  		y := v_1
  8565  		if !(shiftIsBounded(v)) {
  8566  			break
  8567  		}
  8568  		v.reset(OpRISCV64SRA)
  8569  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8570  		v0.AddArg(x)
  8571  		v.AddArg2(v0, y)
  8572  		return true
  8573  	}
  8574  	return false
  8575  }
  8576  func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
  8577  	v_1 := v.Args[1]
  8578  	v_0 := v.Args[0]
  8579  	b := v.Block
  8580  	typ := &b.Func.Config.Types
  8581  	// match: (Rsh8x32 <t> x y)
  8582  	// cond: !shiftIsBounded(v)
  8583  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
  8584  	for {
  8585  		t := v.Type
  8586  		x := v_0
  8587  		y := v_1
  8588  		if !(!shiftIsBounded(v)) {
  8589  			break
  8590  		}
  8591  		v.reset(OpRISCV64SRA)
  8592  		v.Type = t
  8593  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8594  		v0.AddArg(x)
  8595  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8596  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8597  		v2.AuxInt = int64ToAuxInt(-1)
  8598  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8599  		v3.AuxInt = int64ToAuxInt(64)
  8600  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8601  		v4.AddArg(y)
  8602  		v3.AddArg(v4)
  8603  		v2.AddArg(v3)
  8604  		v1.AddArg2(y, v2)
  8605  		v.AddArg2(v0, v1)
  8606  		return true
  8607  	}
  8608  	// match: (Rsh8x32 x y)
  8609  	// cond: shiftIsBounded(v)
  8610  	// result: (SRA (SignExt8to64 x) y)
  8611  	for {
  8612  		x := v_0
  8613  		y := v_1
  8614  		if !(shiftIsBounded(v)) {
  8615  			break
  8616  		}
  8617  		v.reset(OpRISCV64SRA)
  8618  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8619  		v0.AddArg(x)
  8620  		v.AddArg2(v0, y)
  8621  		return true
  8622  	}
  8623  	return false
  8624  }
  8625  func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
  8626  	v_1 := v.Args[1]
  8627  	v_0 := v.Args[0]
  8628  	b := v.Block
  8629  	typ := &b.Func.Config.Types
  8630  	// match: (Rsh8x64 <t> x y)
  8631  	// cond: !shiftIsBounded(v)
  8632  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
  8633  	for {
  8634  		t := v.Type
  8635  		x := v_0
  8636  		y := v_1
  8637  		if !(!shiftIsBounded(v)) {
  8638  			break
  8639  		}
  8640  		v.reset(OpRISCV64SRA)
  8641  		v.Type = t
  8642  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8643  		v0.AddArg(x)
  8644  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8645  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8646  		v2.AuxInt = int64ToAuxInt(-1)
  8647  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8648  		v3.AuxInt = int64ToAuxInt(64)
  8649  		v3.AddArg(y)
  8650  		v2.AddArg(v3)
  8651  		v1.AddArg2(y, v2)
  8652  		v.AddArg2(v0, v1)
  8653  		return true
  8654  	}
  8655  	// match: (Rsh8x64 x y)
  8656  	// cond: shiftIsBounded(v)
  8657  	// result: (SRA (SignExt8to64 x) y)
  8658  	for {
  8659  		x := v_0
  8660  		y := v_1
  8661  		if !(shiftIsBounded(v)) {
  8662  			break
  8663  		}
  8664  		v.reset(OpRISCV64SRA)
  8665  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8666  		v0.AddArg(x)
  8667  		v.AddArg2(v0, y)
  8668  		return true
  8669  	}
  8670  	return false
  8671  }
  8672  func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
  8673  	v_1 := v.Args[1]
  8674  	v_0 := v.Args[0]
  8675  	b := v.Block
  8676  	typ := &b.Func.Config.Types
  8677  	// match: (Rsh8x8 <t> x y)
  8678  	// cond: !shiftIsBounded(v)
  8679  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
  8680  	for {
  8681  		t := v.Type
  8682  		x := v_0
  8683  		y := v_1
  8684  		if !(!shiftIsBounded(v)) {
  8685  			break
  8686  		}
  8687  		v.reset(OpRISCV64SRA)
  8688  		v.Type = t
  8689  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8690  		v0.AddArg(x)
  8691  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8692  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8693  		v2.AuxInt = int64ToAuxInt(-1)
  8694  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8695  		v3.AuxInt = int64ToAuxInt(64)
  8696  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8697  		v4.AddArg(y)
  8698  		v3.AddArg(v4)
  8699  		v2.AddArg(v3)
  8700  		v1.AddArg2(y, v2)
  8701  		v.AddArg2(v0, v1)
  8702  		return true
  8703  	}
  8704  	// match: (Rsh8x8 x y)
  8705  	// cond: shiftIsBounded(v)
  8706  	// result: (SRA (SignExt8to64 x) y)
  8707  	for {
  8708  		x := v_0
  8709  		y := v_1
  8710  		if !(shiftIsBounded(v)) {
  8711  			break
  8712  		}
  8713  		v.reset(OpRISCV64SRA)
  8714  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8715  		v0.AddArg(x)
  8716  		v.AddArg2(v0, y)
  8717  		return true
  8718  	}
  8719  	return false
  8720  }
  8721  func rewriteValueRISCV64_OpSelect0(v *Value) bool {
  8722  	v_0 := v.Args[0]
  8723  	b := v.Block
  8724  	typ := &b.Func.Config.Types
  8725  	// match: (Select0 (Add64carry x y c))
  8726  	// result: (ADD (ADD <typ.UInt64> x y) c)
  8727  	for {
  8728  		if v_0.Op != OpAdd64carry {
  8729  			break
  8730  		}
  8731  		c := v_0.Args[2]
  8732  		x := v_0.Args[0]
  8733  		y := v_0.Args[1]
  8734  		v.reset(OpRISCV64ADD)
  8735  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  8736  		v0.AddArg2(x, y)
  8737  		v.AddArg2(v0, c)
  8738  		return true
  8739  	}
  8740  	// match: (Select0 (Sub64borrow x y c))
  8741  	// result: (SUB (SUB <typ.UInt64> x y) c)
  8742  	for {
  8743  		if v_0.Op != OpSub64borrow {
  8744  			break
  8745  		}
  8746  		c := v_0.Args[2]
  8747  		x := v_0.Args[0]
  8748  		y := v_0.Args[1]
  8749  		v.reset(OpRISCV64SUB)
  8750  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  8751  		v0.AddArg2(x, y)
  8752  		v.AddArg2(v0, c)
  8753  		return true
  8754  	}
  8755  	// match: (Select0 m:(LoweredMuluhilo x y))
  8756  	// cond: m.Uses == 1
  8757  	// result: (MULHU x y)
  8758  	for {
  8759  		m := v_0
  8760  		if m.Op != OpRISCV64LoweredMuluhilo {
  8761  			break
  8762  		}
  8763  		y := m.Args[1]
  8764  		x := m.Args[0]
  8765  		if !(m.Uses == 1) {
  8766  			break
  8767  		}
  8768  		v.reset(OpRISCV64MULHU)
  8769  		v.AddArg2(x, y)
  8770  		return true
  8771  	}
  8772  	return false
  8773  }
  8774  func rewriteValueRISCV64_OpSelect1(v *Value) bool {
  8775  	v_0 := v.Args[0]
  8776  	b := v.Block
  8777  	typ := &b.Func.Config.Types
  8778  	// match: (Select1 (Add64carry x y c))
  8779  	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
  8780  	for {
  8781  		if v_0.Op != OpAdd64carry {
  8782  			break
  8783  		}
  8784  		c := v_0.Args[2]
  8785  		x := v_0.Args[0]
  8786  		y := v_0.Args[1]
  8787  		v.reset(OpRISCV64OR)
  8788  		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8789  		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  8790  		s.AddArg2(x, y)
  8791  		v0.AddArg2(s, x)
  8792  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8793  		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  8794  		v3.AddArg2(s, c)
  8795  		v2.AddArg2(v3, s)
  8796  		v.AddArg2(v0, v2)
  8797  		return true
  8798  	}
  8799  	// match: (Select1 (Sub64borrow x y c))
  8800  	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
  8801  	for {
  8802  		if v_0.Op != OpSub64borrow {
  8803  			break
  8804  		}
  8805  		c := v_0.Args[2]
  8806  		x := v_0.Args[0]
  8807  		y := v_0.Args[1]
  8808  		v.reset(OpRISCV64OR)
  8809  		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8810  		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  8811  		s.AddArg2(x, y)
  8812  		v0.AddArg2(x, s)
  8813  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8814  		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  8815  		v3.AddArg2(s, c)
  8816  		v2.AddArg2(s, v3)
  8817  		v.AddArg2(v0, v2)
  8818  		return true
  8819  	}
  8820  	// match: (Select1 m:(LoweredMuluhilo x y))
  8821  	// cond: m.Uses == 1
  8822  	// result: (MUL x y)
  8823  	for {
  8824  		m := v_0
  8825  		if m.Op != OpRISCV64LoweredMuluhilo {
  8826  			break
  8827  		}
  8828  		y := m.Args[1]
  8829  		x := m.Args[0]
  8830  		if !(m.Uses == 1) {
  8831  			break
  8832  		}
  8833  		v.reset(OpRISCV64MUL)
  8834  		v.AddArg2(x, y)
  8835  		return true
  8836  	}
  8837  	return false
  8838  }
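// Note on the Add64carry/Sub64borrow lowerings above: RISC-V has no flags
// register, so the carry out of x+y+c is reconstructed with unsigned compares,
// carry = (x+y < x) | (x+y+c < x+y), and the borrow for subtraction is handled
// symmetrically. A rough sketch (hypothetical helper, not part of this
// package):
//
//	func add64carry(x, y, c uint64) (sum, carry uint64) {
//		s := x + y
//		sum = s + c
//		if s < x { // SLTU s x: adding y wrapped
//			carry = 1
//		}
//		if sum < s { // SLTU (ADD s c) s: adding c wrapped
//			carry |= 1
//		}
//		return sum, carry
//	}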
  8839  func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
  8840  	v_0 := v.Args[0]
  8841  	b := v.Block
  8842  	// match: (Slicemask <t> x)
  8843  	// result: (SRAI [63] (NEG <t> x))
  8844  	for {
  8845  		t := v.Type
  8846  		x := v_0
  8847  		v.reset(OpRISCV64SRAI)
  8848  		v.AuxInt = int64ToAuxInt(63)
  8849  		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
  8850  		v0.AddArg(x)
  8851  		v.AddArg(v0)
  8852  		return true
  8853  	}
  8854  }
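// Note on Slicemask: for a non-negative length x, NEG x has its sign bit set
// exactly when x > 0, so SRAI [63] broadcasts that bit, yielding all ones for
// a non-empty slice and zero for an empty one.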
  8855  func rewriteValueRISCV64_OpStore(v *Value) bool {
  8856  	v_2 := v.Args[2]
  8857  	v_1 := v.Args[1]
  8858  	v_0 := v.Args[0]
  8859  	// match: (Store {t} ptr val mem)
  8860  	// cond: t.Size() == 1
  8861  	// result: (MOVBstore ptr val mem)
  8862  	for {
  8863  		t := auxToType(v.Aux)
  8864  		ptr := v_0
  8865  		val := v_1
  8866  		mem := v_2
  8867  		if !(t.Size() == 1) {
  8868  			break
  8869  		}
  8870  		v.reset(OpRISCV64MOVBstore)
  8871  		v.AddArg3(ptr, val, mem)
  8872  		return true
  8873  	}
  8874  	// match: (Store {t} ptr val mem)
  8875  	// cond: t.Size() == 2
  8876  	// result: (MOVHstore ptr val mem)
  8877  	for {
  8878  		t := auxToType(v.Aux)
  8879  		ptr := v_0
  8880  		val := v_1
  8881  		mem := v_2
  8882  		if !(t.Size() == 2) {
  8883  			break
  8884  		}
  8885  		v.reset(OpRISCV64MOVHstore)
  8886  		v.AddArg3(ptr, val, mem)
  8887  		return true
  8888  	}
  8889  	// match: (Store {t} ptr val mem)
  8890  	// cond: t.Size() == 4 && !t.IsFloat()
  8891  	// result: (MOVWstore ptr val mem)
  8892  	for {
  8893  		t := auxToType(v.Aux)
  8894  		ptr := v_0
  8895  		val := v_1
  8896  		mem := v_2
  8897  		if !(t.Size() == 4 && !t.IsFloat()) {
  8898  			break
  8899  		}
  8900  		v.reset(OpRISCV64MOVWstore)
  8901  		v.AddArg3(ptr, val, mem)
  8902  		return true
  8903  	}
  8904  	// match: (Store {t} ptr val mem)
  8905  	// cond: t.Size() == 8 && !t.IsFloat()
  8906  	// result: (MOVDstore ptr val mem)
  8907  	for {
  8908  		t := auxToType(v.Aux)
  8909  		ptr := v_0
  8910  		val := v_1
  8911  		mem := v_2
  8912  		if !(t.Size() == 8 && !t.IsFloat()) {
  8913  			break
  8914  		}
  8915  		v.reset(OpRISCV64MOVDstore)
  8916  		v.AddArg3(ptr, val, mem)
  8917  		return true
  8918  	}
  8919  	// match: (Store {t} ptr val mem)
  8920  	// cond: t.Size() == 4 && t.IsFloat()
  8921  	// result: (FMOVWstore ptr val mem)
  8922  	for {
  8923  		t := auxToType(v.Aux)
  8924  		ptr := v_0
  8925  		val := v_1
  8926  		mem := v_2
  8927  		if !(t.Size() == 4 && t.IsFloat()) {
  8928  			break
  8929  		}
  8930  		v.reset(OpRISCV64FMOVWstore)
  8931  		v.AddArg3(ptr, val, mem)
  8932  		return true
  8933  	}
  8934  	// match: (Store {t} ptr val mem)
  8935  	// cond: t.Size() == 8 && t.IsFloat()
  8936  	// result: (FMOVDstore ptr val mem)
  8937  	for {
  8938  		t := auxToType(v.Aux)
  8939  		ptr := v_0
  8940  		val := v_1
  8941  		mem := v_2
  8942  		if !(t.Size() == 8 && t.IsFloat()) {
  8943  			break
  8944  		}
  8945  		v.reset(OpRISCV64FMOVDstore)
  8946  		v.AddArg3(ptr, val, mem)
  8947  		return true
  8948  	}
  8949  	return false
  8950  }
  8951  func rewriteValueRISCV64_OpZero(v *Value) bool {
  8952  	v_1 := v.Args[1]
  8953  	v_0 := v.Args[0]
  8954  	b := v.Block
  8955  	config := b.Func.Config
  8956  	typ := &b.Func.Config.Types
  8957  	// match: (Zero [0] _ mem)
  8958  	// result: mem
  8959  	for {
  8960  		if auxIntToInt64(v.AuxInt) != 0 {
  8961  			break
  8962  		}
  8963  		mem := v_1
  8964  		v.copyOf(mem)
  8965  		return true
  8966  	}
  8967  	// match: (Zero [1] ptr mem)
  8968  	// result: (MOVBstore ptr (MOVDconst [0]) mem)
  8969  	for {
  8970  		if auxIntToInt64(v.AuxInt) != 1 {
  8971  			break
  8972  		}
  8973  		ptr := v_0
  8974  		mem := v_1
  8975  		v.reset(OpRISCV64MOVBstore)
  8976  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8977  		v0.AuxInt = int64ToAuxInt(0)
  8978  		v.AddArg3(ptr, v0, mem)
  8979  		return true
  8980  	}
  8981  	// match: (Zero [2] {t} ptr mem)
  8982  	// cond: t.Alignment()%2 == 0
  8983  	// result: (MOVHstore ptr (MOVDconst [0]) mem)
  8984  	for {
  8985  		if auxIntToInt64(v.AuxInt) != 2 {
  8986  			break
  8987  		}
  8988  		t := auxToType(v.Aux)
  8989  		ptr := v_0
  8990  		mem := v_1
  8991  		if !(t.Alignment()%2 == 0) {
  8992  			break
  8993  		}
  8994  		v.reset(OpRISCV64MOVHstore)
  8995  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8996  		v0.AuxInt = int64ToAuxInt(0)
  8997  		v.AddArg3(ptr, v0, mem)
  8998  		return true
  8999  	}
  9000  	// match: (Zero [2] ptr mem)
  9001  	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
  9002  	for {
  9003  		if auxIntToInt64(v.AuxInt) != 2 {
  9004  			break
  9005  		}
  9006  		ptr := v_0
  9007  		mem := v_1
  9008  		v.reset(OpRISCV64MOVBstore)
  9009  		v.AuxInt = int32ToAuxInt(1)
  9010  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9011  		v0.AuxInt = int64ToAuxInt(0)
  9012  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9013  		v1.AddArg3(ptr, v0, mem)
  9014  		v.AddArg3(ptr, v0, v1)
  9015  		return true
  9016  	}
  9017  	// match: (Zero [4] {t} ptr mem)
  9018  	// cond: t.Alignment()%4 == 0
  9019  	// result: (MOVWstore ptr (MOVDconst [0]) mem)
  9020  	for {
  9021  		if auxIntToInt64(v.AuxInt) != 4 {
  9022  			break
  9023  		}
  9024  		t := auxToType(v.Aux)
  9025  		ptr := v_0
  9026  		mem := v_1
  9027  		if !(t.Alignment()%4 == 0) {
  9028  			break
  9029  		}
  9030  		v.reset(OpRISCV64MOVWstore)
  9031  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9032  		v0.AuxInt = int64ToAuxInt(0)
  9033  		v.AddArg3(ptr, v0, mem)
  9034  		return true
  9035  	}
  9036  	// match: (Zero [4] {t} ptr mem)
  9037  	// cond: t.Alignment()%2 == 0
  9038  	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
  9039  	for {
  9040  		if auxIntToInt64(v.AuxInt) != 4 {
  9041  			break
  9042  		}
  9043  		t := auxToType(v.Aux)
  9044  		ptr := v_0
  9045  		mem := v_1
  9046  		if !(t.Alignment()%2 == 0) {
  9047  			break
  9048  		}
  9049  		v.reset(OpRISCV64MOVHstore)
  9050  		v.AuxInt = int32ToAuxInt(2)
  9051  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9052  		v0.AuxInt = int64ToAuxInt(0)
  9053  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9054  		v1.AddArg3(ptr, v0, mem)
  9055  		v.AddArg3(ptr, v0, v1)
  9056  		return true
  9057  	}
  9058  	// match: (Zero [4] ptr mem)
  9059  	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
  9060  	for {
  9061  		if auxIntToInt64(v.AuxInt) != 4 {
  9062  			break
  9063  		}
  9064  		ptr := v_0
  9065  		mem := v_1
  9066  		v.reset(OpRISCV64MOVBstore)
  9067  		v.AuxInt = int32ToAuxInt(3)
  9068  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9069  		v0.AuxInt = int64ToAuxInt(0)
  9070  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9071  		v1.AuxInt = int32ToAuxInt(2)
  9072  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9073  		v2.AuxInt = int32ToAuxInt(1)
  9074  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9075  		v3.AddArg3(ptr, v0, mem)
  9076  		v2.AddArg3(ptr, v0, v3)
  9077  		v1.AddArg3(ptr, v0, v2)
  9078  		v.AddArg3(ptr, v0, v1)
  9079  		return true
  9080  	}
  9081  	// match: (Zero [8] {t} ptr mem)
  9082  	// cond: t.Alignment()%8 == 0
  9083  	// result: (MOVDstore ptr (MOVDconst [0]) mem)
  9084  	for {
  9085  		if auxIntToInt64(v.AuxInt) != 8 {
  9086  			break
  9087  		}
  9088  		t := auxToType(v.Aux)
  9089  		ptr := v_0
  9090  		mem := v_1
  9091  		if !(t.Alignment()%8 == 0) {
  9092  			break
  9093  		}
  9094  		v.reset(OpRISCV64MOVDstore)
  9095  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9096  		v0.AuxInt = int64ToAuxInt(0)
  9097  		v.AddArg3(ptr, v0, mem)
  9098  		return true
  9099  	}
  9100  	// match: (Zero [8] {t} ptr mem)
  9101  	// cond: t.Alignment()%4 == 0
  9102  	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
  9103  	for {
  9104  		if auxIntToInt64(v.AuxInt) != 8 {
  9105  			break
  9106  		}
  9107  		t := auxToType(v.Aux)
  9108  		ptr := v_0
  9109  		mem := v_1
  9110  		if !(t.Alignment()%4 == 0) {
  9111  			break
  9112  		}
  9113  		v.reset(OpRISCV64MOVWstore)
  9114  		v.AuxInt = int32ToAuxInt(4)
  9115  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9116  		v0.AuxInt = int64ToAuxInt(0)
  9117  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  9118  		v1.AddArg3(ptr, v0, mem)
  9119  		v.AddArg3(ptr, v0, v1)
  9120  		return true
  9121  	}
  9122  	// match: (Zero [8] {t} ptr mem)
  9123  	// cond: t.Alignment()%2 == 0
  9124  	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
  9125  	for {
  9126  		if auxIntToInt64(v.AuxInt) != 8 {
  9127  			break
  9128  		}
  9129  		t := auxToType(v.Aux)
  9130  		ptr := v_0
  9131  		mem := v_1
  9132  		if !(t.Alignment()%2 == 0) {
  9133  			break
  9134  		}
  9135  		v.reset(OpRISCV64MOVHstore)
  9136  		v.AuxInt = int32ToAuxInt(6)
  9137  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9138  		v0.AuxInt = int64ToAuxInt(0)
  9139  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9140  		v1.AuxInt = int32ToAuxInt(4)
  9141  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9142  		v2.AuxInt = int32ToAuxInt(2)
  9143  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9144  		v3.AddArg3(ptr, v0, mem)
  9145  		v2.AddArg3(ptr, v0, v3)
  9146  		v1.AddArg3(ptr, v0, v2)
  9147  		v.AddArg3(ptr, v0, v1)
  9148  		return true
  9149  	}
  9150  	// match: (Zero [3] ptr mem)
  9151  	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
  9152  	for {
  9153  		if auxIntToInt64(v.AuxInt) != 3 {
  9154  			break
  9155  		}
  9156  		ptr := v_0
  9157  		mem := v_1
  9158  		v.reset(OpRISCV64MOVBstore)
  9159  		v.AuxInt = int32ToAuxInt(2)
  9160  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9161  		v0.AuxInt = int64ToAuxInt(0)
  9162  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9163  		v1.AuxInt = int32ToAuxInt(1)
  9164  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9165  		v2.AddArg3(ptr, v0, mem)
  9166  		v1.AddArg3(ptr, v0, v2)
  9167  		v.AddArg3(ptr, v0, v1)
  9168  		return true
  9169  	}
  9170  	// match: (Zero [6] {t} ptr mem)
  9171  	// cond: t.Alignment()%2 == 0
  9172  	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
  9173  	for {
  9174  		if auxIntToInt64(v.AuxInt) != 6 {
  9175  			break
  9176  		}
  9177  		t := auxToType(v.Aux)
  9178  		ptr := v_0
  9179  		mem := v_1
  9180  		if !(t.Alignment()%2 == 0) {
  9181  			break
  9182  		}
  9183  		v.reset(OpRISCV64MOVHstore)
  9184  		v.AuxInt = int32ToAuxInt(4)
  9185  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9186  		v0.AuxInt = int64ToAuxInt(0)
  9187  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9188  		v1.AuxInt = int32ToAuxInt(2)
  9189  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9190  		v2.AddArg3(ptr, v0, mem)
  9191  		v1.AddArg3(ptr, v0, v2)
  9192  		v.AddArg3(ptr, v0, v1)
  9193  		return true
  9194  	}
  9195  	// match: (Zero [12] {t} ptr mem)
  9196  	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [32] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(24)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(8)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64DUFFZERO)
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
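	// Editor's note (not generated): DUFFZERO's AuxInt here is a code offset into
	// the runtime duffzero routine, chosen so that only enough of its eight-byte
	// store entries run to clear s bytes; sizes that are larger or not 8-byte
	// aligned fall through to the generic LoweredZero rule below.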
	// match: (Zero [s] {t} ptr mem)
	// result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg2(ptr, v1)
		v.AddArg3(ptr, v0, mem)
		return true
	}
}
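// Editor's note (illustrative, not generated): for a 16-byte, 8-byte-aligned
// zero, the fixed-size rules above build a store chain of roughly this shape:
//
//	zero = MOVDconst [0]
//	v1   = MOVDstore ptr zero mem       // clears bytes 0..7
//	v    = MOVDstore [8] ptr zero v1    // clears bytes 8..15
//
// Each store takes the previous store's memory result as its mem argument, so
// the writes remain ordered in the SSA memory chain.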
func rewriteBlockRISCV64(b *Block) bool {
	typ := &b.Func.Config.Types
	switch b.Kind {
	case BlockRISCV64BEQ:
		// match: (BEQ (MOVDconst [0]) cond yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
		// match: (BEQ cond (MOVDconst [0]) yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
	case BlockRISCV64BEQZ:
		// match: (BEQZ (SEQZ x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64SEQZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BEQZ (SNEZ x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64SNEZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BEQZ (NEG x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64NEG {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BEQZ (FNES <t> x y) yes no)
		// result: (BNEZ (FEQS <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNES {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BNEZ, v0)
				return true
			}
		}
		// match: (BEQZ (FNED <t> x y) yes no)
		// result: (BNEZ (FEQD <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNED {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BNEZ, v0)
				return true
			}
		}
		// match: (BEQZ (SUB x y) yes no)
		// result: (BEQ x y yes no)
		for b.Controls[0].Op == OpRISCV64SUB {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BEQ, x, y)
			return true
		}
		// match: (BEQZ (SLT x y) yes no)
		// result: (BGE x y yes no)
		for b.Controls[0].Op == OpRISCV64SLT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BGE, x, y)
			return true
		}
		// match: (BEQZ (SLTU x y) yes no)
		// result: (BGEU x y yes no)
		for b.Controls[0].Op == OpRISCV64SLTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BGEU, x, y)
			return true
		}
		// match: (BEQZ (SLTI [x] y) yes no)
		// result: (BGE y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTI {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BGE, y, v0)
			return true
		}
		// match: (BEQZ (SLTIU [x] y) yes no)
		// result: (BGEU y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTIU {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
			return true
		}
	case BlockRISCV64BGE:
		// match: (BGE (MOVDconst [0]) cond yes no)
		// result: (BLEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BLEZ, cond)
			return true
		}
		// match: (BGE cond (MOVDconst [0]) yes no)
		// result: (BGEZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BGEZ, cond)
			return true
		}
	case BlockRISCV64BGEU:
		// match: (BGEU (MOVDconst [0]) cond yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
	case BlockRISCV64BLT:
		// match: (BLT (MOVDconst [0]) cond yes no)
		// result: (BGTZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BGTZ, cond)
			return true
		}
		// match: (BLT cond (MOVDconst [0]) yes no)
		// result: (BLTZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BLTZ, cond)
			return true
		}
	case BlockRISCV64BLTU:
		// match: (BLTU (MOVDconst [0]) cond yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
	case BlockRISCV64BNE:
		// match: (BNE (MOVDconst [0]) cond yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
		// match: (BNE cond (MOVDconst [0]) yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
	case BlockRISCV64BNEZ:
		// match: (BNEZ (SEQZ x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64SEQZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BNEZ (SNEZ x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64SNEZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BNEZ (NEG x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64NEG {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BNEZ (FNES <t> x y) yes no)
		// result: (BEQZ (FEQS <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNES {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BEQZ, v0)
				return true
			}
		}
		// match: (BNEZ (FNED <t> x y) yes no)
		// result: (BEQZ (FEQD <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNED {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BEQZ, v0)
				return true
			}
		}
		// match: (BNEZ (SUB x y) yes no)
		// result: (BNE x y yes no)
		for b.Controls[0].Op == OpRISCV64SUB {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BNE, x, y)
			return true
		}
		// match: (BNEZ (SLT x y) yes no)
		// result: (BLT x y yes no)
		for b.Controls[0].Op == OpRISCV64SLT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BLT, x, y)
			return true
		}
		// match: (BNEZ (SLTU x y) yes no)
		// result: (BLTU x y yes no)
		for b.Controls[0].Op == OpRISCV64SLTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BLTU, x, y)
			return true
		}
		// match: (BNEZ (SLTI [x] y) yes no)
		// result: (BLT y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTI {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BLT, y, v0)
			return true
		}
		// match: (BNEZ (SLTIU [x] y) yes no)
		// result: (BLTU y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTIU {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
		for {
			cond := b.Controls[0]
			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
			v0.AddArg(cond)
			b.resetWithControl(BlockRISCV64BNEZ, v0)
			return true
		}
	}
	return false
}
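// Editor's note (illustrative sketch, not part of the generated file): each
// rewriter above applies at most one local rule per call and reports whether it
// changed anything, so the surrounding rewrite pass (see rewrite.go in this
// package) re-runs them until nothing fires. A simplified driver loop, shown
// here with hypothetical structure rather than the actual implementation,
// looks like:
//
//	for changed := true; changed; {
//		changed = false
//		for _, b := range f.Blocks {
//			for _, v := range b.Values {
//				if rewriteValueRISCV64(v) {
//					changed = true
//				}
//			}
//			if rewriteBlockRISCV64(b) {
//				changed = true
//			}
//		}
//	}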