Source file src/cmd/compile/internal/ssa/rewriteLOONG64.go

     1  // Code generated from _gen/LOONG64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
     7  func rewriteValueLOONG64(v *Value) bool {
     8  	switch v.Op {
     9  	case OpAdd16:
    10  		v.Op = OpLOONG64ADDV
    11  		return true
    12  	case OpAdd32:
    13  		v.Op = OpLOONG64ADDV
    14  		return true
    15  	case OpAdd32F:
    16  		v.Op = OpLOONG64ADDF
    17  		return true
    18  	case OpAdd64:
    19  		v.Op = OpLOONG64ADDV
    20  		return true
    21  	case OpAdd64F:
    22  		v.Op = OpLOONG64ADDD
    23  		return true
    24  	case OpAdd8:
    25  		v.Op = OpLOONG64ADDV
    26  		return true
    27  	case OpAddPtr:
    28  		v.Op = OpLOONG64ADDV
    29  		return true
    30  	case OpAddr:
    31  		return rewriteValueLOONG64_OpAddr(v)
    32  	case OpAnd16:
    33  		v.Op = OpLOONG64AND
    34  		return true
    35  	case OpAnd32:
    36  		v.Op = OpLOONG64AND
    37  		return true
    38  	case OpAnd64:
    39  		v.Op = OpLOONG64AND
    40  		return true
    41  	case OpAnd8:
    42  		v.Op = OpLOONG64AND
    43  		return true
    44  	case OpAndB:
    45  		v.Op = OpLOONG64AND
    46  		return true
    47  	case OpAtomicAdd32:
    48  		v.Op = OpLOONG64LoweredAtomicAdd32
    49  		return true
    50  	case OpAtomicAdd64:
    51  		v.Op = OpLOONG64LoweredAtomicAdd64
    52  		return true
    53  	case OpAtomicCompareAndSwap32:
    54  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v)
    55  	case OpAtomicCompareAndSwap64:
    56  		v.Op = OpLOONG64LoweredAtomicCas64
    57  		return true
    58  	case OpAtomicExchange32:
    59  		v.Op = OpLOONG64LoweredAtomicExchange32
    60  		return true
    61  	case OpAtomicExchange64:
    62  		v.Op = OpLOONG64LoweredAtomicExchange64
    63  		return true
    64  	case OpAtomicLoad32:
    65  		v.Op = OpLOONG64LoweredAtomicLoad32
    66  		return true
    67  	case OpAtomicLoad64:
    68  		v.Op = OpLOONG64LoweredAtomicLoad64
    69  		return true
    70  	case OpAtomicLoad8:
    71  		v.Op = OpLOONG64LoweredAtomicLoad8
    72  		return true
    73  	case OpAtomicLoadPtr:
    74  		v.Op = OpLOONG64LoweredAtomicLoad64
    75  		return true
    76  	case OpAtomicStore32:
    77  		v.Op = OpLOONG64LoweredAtomicStore32
    78  		return true
    79  	case OpAtomicStore64:
    80  		v.Op = OpLOONG64LoweredAtomicStore64
    81  		return true
    82  	case OpAtomicStore8:
    83  		v.Op = OpLOONG64LoweredAtomicStore8
    84  		return true
    85  	case OpAtomicStorePtrNoWB:
    86  		v.Op = OpLOONG64LoweredAtomicStore64
    87  		return true
    88  	case OpAvg64u:
    89  		return rewriteValueLOONG64_OpAvg64u(v)
    90  	case OpClosureCall:
    91  		v.Op = OpLOONG64CALLclosure
    92  		return true
    93  	case OpCom16:
    94  		return rewriteValueLOONG64_OpCom16(v)
    95  	case OpCom32:
    96  		return rewriteValueLOONG64_OpCom32(v)
    97  	case OpCom64:
    98  		return rewriteValueLOONG64_OpCom64(v)
    99  	case OpCom8:
   100  		return rewriteValueLOONG64_OpCom8(v)
   101  	case OpCondSelect:
   102  		return rewriteValueLOONG64_OpCondSelect(v)
   103  	case OpConst16:
   104  		return rewriteValueLOONG64_OpConst16(v)
   105  	case OpConst32:
   106  		return rewriteValueLOONG64_OpConst32(v)
   107  	case OpConst32F:
   108  		return rewriteValueLOONG64_OpConst32F(v)
   109  	case OpConst64:
   110  		return rewriteValueLOONG64_OpConst64(v)
   111  	case OpConst64F:
   112  		return rewriteValueLOONG64_OpConst64F(v)
   113  	case OpConst8:
   114  		return rewriteValueLOONG64_OpConst8(v)
   115  	case OpConstBool:
   116  		return rewriteValueLOONG64_OpConstBool(v)
   117  	case OpConstNil:
   118  		return rewriteValueLOONG64_OpConstNil(v)
   119  	case OpCvt32Fto32:
   120  		v.Op = OpLOONG64TRUNCFW
   121  		return true
   122  	case OpCvt32Fto64:
   123  		v.Op = OpLOONG64TRUNCFV
   124  		return true
   125  	case OpCvt32Fto64F:
   126  		v.Op = OpLOONG64MOVFD
   127  		return true
   128  	case OpCvt32to32F:
   129  		v.Op = OpLOONG64MOVWF
   130  		return true
   131  	case OpCvt32to64F:
   132  		v.Op = OpLOONG64MOVWD
   133  		return true
   134  	case OpCvt64Fto32:
   135  		v.Op = OpLOONG64TRUNCDW
   136  		return true
   137  	case OpCvt64Fto32F:
   138  		v.Op = OpLOONG64MOVDF
   139  		return true
   140  	case OpCvt64Fto64:
   141  		v.Op = OpLOONG64TRUNCDV
   142  		return true
   143  	case OpCvt64to32F:
   144  		v.Op = OpLOONG64MOVVF
   145  		return true
   146  	case OpCvt64to64F:
   147  		v.Op = OpLOONG64MOVVD
   148  		return true
   149  	case OpCvtBoolToUint8:
   150  		v.Op = OpCopy
   151  		return true
   152  	case OpDiv16:
   153  		return rewriteValueLOONG64_OpDiv16(v)
   154  	case OpDiv16u:
   155  		return rewriteValueLOONG64_OpDiv16u(v)
   156  	case OpDiv32:
   157  		return rewriteValueLOONG64_OpDiv32(v)
   158  	case OpDiv32F:
   159  		v.Op = OpLOONG64DIVF
   160  		return true
   161  	case OpDiv32u:
   162  		return rewriteValueLOONG64_OpDiv32u(v)
   163  	case OpDiv64:
   164  		return rewriteValueLOONG64_OpDiv64(v)
   165  	case OpDiv64F:
   166  		v.Op = OpLOONG64DIVD
   167  		return true
   168  	case OpDiv64u:
   169  		v.Op = OpLOONG64DIVVU
   170  		return true
   171  	case OpDiv8:
   172  		return rewriteValueLOONG64_OpDiv8(v)
   173  	case OpDiv8u:
   174  		return rewriteValueLOONG64_OpDiv8u(v)
   175  	case OpEq16:
   176  		return rewriteValueLOONG64_OpEq16(v)
   177  	case OpEq32:
   178  		return rewriteValueLOONG64_OpEq32(v)
   179  	case OpEq32F:
   180  		return rewriteValueLOONG64_OpEq32F(v)
   181  	case OpEq64:
   182  		return rewriteValueLOONG64_OpEq64(v)
   183  	case OpEq64F:
   184  		return rewriteValueLOONG64_OpEq64F(v)
   185  	case OpEq8:
   186  		return rewriteValueLOONG64_OpEq8(v)
   187  	case OpEqB:
   188  		return rewriteValueLOONG64_OpEqB(v)
   189  	case OpEqPtr:
   190  		return rewriteValueLOONG64_OpEqPtr(v)
   191  	case OpGetCallerPC:
   192  		v.Op = OpLOONG64LoweredGetCallerPC
   193  		return true
   194  	case OpGetCallerSP:
   195  		v.Op = OpLOONG64LoweredGetCallerSP
   196  		return true
   197  	case OpGetClosurePtr:
   198  		v.Op = OpLOONG64LoweredGetClosurePtr
   199  		return true
   200  	case OpHmul32:
   201  		return rewriteValueLOONG64_OpHmul32(v)
   202  	case OpHmul32u:
   203  		return rewriteValueLOONG64_OpHmul32u(v)
   204  	case OpHmul64:
   205  		v.Op = OpLOONG64MULHV
   206  		return true
   207  	case OpHmul64u:
   208  		v.Op = OpLOONG64MULHVU
   209  		return true
   210  	case OpInterCall:
   211  		v.Op = OpLOONG64CALLinter
   212  		return true
   213  	case OpIsInBounds:
   214  		return rewriteValueLOONG64_OpIsInBounds(v)
   215  	case OpIsNonNil:
   216  		return rewriteValueLOONG64_OpIsNonNil(v)
   217  	case OpIsSliceInBounds:
   218  		return rewriteValueLOONG64_OpIsSliceInBounds(v)
   219  	case OpLOONG64ADDV:
   220  		return rewriteValueLOONG64_OpLOONG64ADDV(v)
   221  	case OpLOONG64ADDVconst:
   222  		return rewriteValueLOONG64_OpLOONG64ADDVconst(v)
   223  	case OpLOONG64AND:
   224  		return rewriteValueLOONG64_OpLOONG64AND(v)
   225  	case OpLOONG64ANDconst:
   226  		return rewriteValueLOONG64_OpLOONG64ANDconst(v)
   227  	case OpLOONG64DIVV:
   228  		return rewriteValueLOONG64_OpLOONG64DIVV(v)
   229  	case OpLOONG64DIVVU:
   230  		return rewriteValueLOONG64_OpLOONG64DIVVU(v)
   231  	case OpLOONG64LoweredAtomicAdd32:
   232  		return rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd32(v)
   233  	case OpLOONG64LoweredAtomicAdd64:
   234  		return rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd64(v)
   235  	case OpLOONG64LoweredAtomicStore32:
   236  		return rewriteValueLOONG64_OpLOONG64LoweredAtomicStore32(v)
   237  	case OpLOONG64LoweredAtomicStore64:
   238  		return rewriteValueLOONG64_OpLOONG64LoweredAtomicStore64(v)
   239  	case OpLOONG64MASKEQZ:
   240  		return rewriteValueLOONG64_OpLOONG64MASKEQZ(v)
   241  	case OpLOONG64MASKNEZ:
   242  		return rewriteValueLOONG64_OpLOONG64MASKNEZ(v)
   243  	case OpLOONG64MOVBUload:
   244  		return rewriteValueLOONG64_OpLOONG64MOVBUload(v)
   245  	case OpLOONG64MOVBUreg:
   246  		return rewriteValueLOONG64_OpLOONG64MOVBUreg(v)
   247  	case OpLOONG64MOVBload:
   248  		return rewriteValueLOONG64_OpLOONG64MOVBload(v)
   249  	case OpLOONG64MOVBreg:
   250  		return rewriteValueLOONG64_OpLOONG64MOVBreg(v)
   251  	case OpLOONG64MOVBstore:
   252  		return rewriteValueLOONG64_OpLOONG64MOVBstore(v)
   253  	case OpLOONG64MOVBstorezero:
   254  		return rewriteValueLOONG64_OpLOONG64MOVBstorezero(v)
   255  	case OpLOONG64MOVDload:
   256  		return rewriteValueLOONG64_OpLOONG64MOVDload(v)
   257  	case OpLOONG64MOVDstore:
   258  		return rewriteValueLOONG64_OpLOONG64MOVDstore(v)
   259  	case OpLOONG64MOVFload:
   260  		return rewriteValueLOONG64_OpLOONG64MOVFload(v)
   261  	case OpLOONG64MOVFstore:
   262  		return rewriteValueLOONG64_OpLOONG64MOVFstore(v)
   263  	case OpLOONG64MOVHUload:
   264  		return rewriteValueLOONG64_OpLOONG64MOVHUload(v)
   265  	case OpLOONG64MOVHUreg:
   266  		return rewriteValueLOONG64_OpLOONG64MOVHUreg(v)
   267  	case OpLOONG64MOVHload:
   268  		return rewriteValueLOONG64_OpLOONG64MOVHload(v)
   269  	case OpLOONG64MOVHreg:
   270  		return rewriteValueLOONG64_OpLOONG64MOVHreg(v)
   271  	case OpLOONG64MOVHstore:
   272  		return rewriteValueLOONG64_OpLOONG64MOVHstore(v)
   273  	case OpLOONG64MOVHstorezero:
   274  		return rewriteValueLOONG64_OpLOONG64MOVHstorezero(v)
   275  	case OpLOONG64MOVVload:
   276  		return rewriteValueLOONG64_OpLOONG64MOVVload(v)
   277  	case OpLOONG64MOVVreg:
   278  		return rewriteValueLOONG64_OpLOONG64MOVVreg(v)
   279  	case OpLOONG64MOVVstore:
   280  		return rewriteValueLOONG64_OpLOONG64MOVVstore(v)
   281  	case OpLOONG64MOVVstorezero:
   282  		return rewriteValueLOONG64_OpLOONG64MOVVstorezero(v)
   283  	case OpLOONG64MOVWUload:
   284  		return rewriteValueLOONG64_OpLOONG64MOVWUload(v)
   285  	case OpLOONG64MOVWUreg:
   286  		return rewriteValueLOONG64_OpLOONG64MOVWUreg(v)
   287  	case OpLOONG64MOVWload:
   288  		return rewriteValueLOONG64_OpLOONG64MOVWload(v)
   289  	case OpLOONG64MOVWreg:
   290  		return rewriteValueLOONG64_OpLOONG64MOVWreg(v)
   291  	case OpLOONG64MOVWstore:
   292  		return rewriteValueLOONG64_OpLOONG64MOVWstore(v)
   293  	case OpLOONG64MOVWstorezero:
   294  		return rewriteValueLOONG64_OpLOONG64MOVWstorezero(v)
   295  	case OpLOONG64MULV:
   296  		return rewriteValueLOONG64_OpLOONG64MULV(v)
   297  	case OpLOONG64NEGV:
   298  		return rewriteValueLOONG64_OpLOONG64NEGV(v)
   299  	case OpLOONG64NOR:
   300  		return rewriteValueLOONG64_OpLOONG64NOR(v)
   301  	case OpLOONG64NORconst:
   302  		return rewriteValueLOONG64_OpLOONG64NORconst(v)
   303  	case OpLOONG64OR:
   304  		return rewriteValueLOONG64_OpLOONG64OR(v)
   305  	case OpLOONG64ORconst:
   306  		return rewriteValueLOONG64_OpLOONG64ORconst(v)
   307  	case OpLOONG64REMV:
   308  		return rewriteValueLOONG64_OpLOONG64REMV(v)
   309  	case OpLOONG64REMVU:
   310  		return rewriteValueLOONG64_OpLOONG64REMVU(v)
   311  	case OpLOONG64ROTR:
   312  		return rewriteValueLOONG64_OpLOONG64ROTR(v)
   313  	case OpLOONG64ROTRV:
   314  		return rewriteValueLOONG64_OpLOONG64ROTRV(v)
   315  	case OpLOONG64SGT:
   316  		return rewriteValueLOONG64_OpLOONG64SGT(v)
   317  	case OpLOONG64SGTU:
   318  		return rewriteValueLOONG64_OpLOONG64SGTU(v)
   319  	case OpLOONG64SGTUconst:
   320  		return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
   321  	case OpLOONG64SGTconst:
   322  		return rewriteValueLOONG64_OpLOONG64SGTconst(v)
   323  	case OpLOONG64SLLV:
   324  		return rewriteValueLOONG64_OpLOONG64SLLV(v)
   325  	case OpLOONG64SLLVconst:
   326  		return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
   327  	case OpLOONG64SRAV:
   328  		return rewriteValueLOONG64_OpLOONG64SRAV(v)
   329  	case OpLOONG64SRAVconst:
   330  		return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
   331  	case OpLOONG64SRLV:
   332  		return rewriteValueLOONG64_OpLOONG64SRLV(v)
   333  	case OpLOONG64SRLVconst:
   334  		return rewriteValueLOONG64_OpLOONG64SRLVconst(v)
   335  	case OpLOONG64SUBV:
   336  		return rewriteValueLOONG64_OpLOONG64SUBV(v)
   337  	case OpLOONG64SUBVconst:
   338  		return rewriteValueLOONG64_OpLOONG64SUBVconst(v)
   339  	case OpLOONG64XOR:
   340  		return rewriteValueLOONG64_OpLOONG64XOR(v)
   341  	case OpLOONG64XORconst:
   342  		return rewriteValueLOONG64_OpLOONG64XORconst(v)
   343  	case OpLeq16:
   344  		return rewriteValueLOONG64_OpLeq16(v)
   345  	case OpLeq16U:
   346  		return rewriteValueLOONG64_OpLeq16U(v)
   347  	case OpLeq32:
   348  		return rewriteValueLOONG64_OpLeq32(v)
   349  	case OpLeq32F:
   350  		return rewriteValueLOONG64_OpLeq32F(v)
   351  	case OpLeq32U:
   352  		return rewriteValueLOONG64_OpLeq32U(v)
   353  	case OpLeq64:
   354  		return rewriteValueLOONG64_OpLeq64(v)
   355  	case OpLeq64F:
   356  		return rewriteValueLOONG64_OpLeq64F(v)
   357  	case OpLeq64U:
   358  		return rewriteValueLOONG64_OpLeq64U(v)
   359  	case OpLeq8:
   360  		return rewriteValueLOONG64_OpLeq8(v)
   361  	case OpLeq8U:
   362  		return rewriteValueLOONG64_OpLeq8U(v)
   363  	case OpLess16:
   364  		return rewriteValueLOONG64_OpLess16(v)
   365  	case OpLess16U:
   366  		return rewriteValueLOONG64_OpLess16U(v)
   367  	case OpLess32:
   368  		return rewriteValueLOONG64_OpLess32(v)
   369  	case OpLess32F:
   370  		return rewriteValueLOONG64_OpLess32F(v)
   371  	case OpLess32U:
   372  		return rewriteValueLOONG64_OpLess32U(v)
   373  	case OpLess64:
   374  		return rewriteValueLOONG64_OpLess64(v)
   375  	case OpLess64F:
   376  		return rewriteValueLOONG64_OpLess64F(v)
   377  	case OpLess64U:
   378  		return rewriteValueLOONG64_OpLess64U(v)
   379  	case OpLess8:
   380  		return rewriteValueLOONG64_OpLess8(v)
   381  	case OpLess8U:
   382  		return rewriteValueLOONG64_OpLess8U(v)
   383  	case OpLoad:
   384  		return rewriteValueLOONG64_OpLoad(v)
   385  	case OpLocalAddr:
   386  		return rewriteValueLOONG64_OpLocalAddr(v)
   387  	case OpLsh16x16:
   388  		return rewriteValueLOONG64_OpLsh16x16(v)
   389  	case OpLsh16x32:
   390  		return rewriteValueLOONG64_OpLsh16x32(v)
   391  	case OpLsh16x64:
   392  		return rewriteValueLOONG64_OpLsh16x64(v)
   393  	case OpLsh16x8:
   394  		return rewriteValueLOONG64_OpLsh16x8(v)
   395  	case OpLsh32x16:
   396  		return rewriteValueLOONG64_OpLsh32x16(v)
   397  	case OpLsh32x32:
   398  		return rewriteValueLOONG64_OpLsh32x32(v)
   399  	case OpLsh32x64:
   400  		return rewriteValueLOONG64_OpLsh32x64(v)
   401  	case OpLsh32x8:
   402  		return rewriteValueLOONG64_OpLsh32x8(v)
   403  	case OpLsh64x16:
   404  		return rewriteValueLOONG64_OpLsh64x16(v)
   405  	case OpLsh64x32:
   406  		return rewriteValueLOONG64_OpLsh64x32(v)
   407  	case OpLsh64x64:
   408  		return rewriteValueLOONG64_OpLsh64x64(v)
   409  	case OpLsh64x8:
   410  		return rewriteValueLOONG64_OpLsh64x8(v)
   411  	case OpLsh8x16:
   412  		return rewriteValueLOONG64_OpLsh8x16(v)
   413  	case OpLsh8x32:
   414  		return rewriteValueLOONG64_OpLsh8x32(v)
   415  	case OpLsh8x64:
   416  		return rewriteValueLOONG64_OpLsh8x64(v)
   417  	case OpLsh8x8:
   418  		return rewriteValueLOONG64_OpLsh8x8(v)
   419  	case OpMod16:
   420  		return rewriteValueLOONG64_OpMod16(v)
   421  	case OpMod16u:
   422  		return rewriteValueLOONG64_OpMod16u(v)
   423  	case OpMod32:
   424  		return rewriteValueLOONG64_OpMod32(v)
   425  	case OpMod32u:
   426  		return rewriteValueLOONG64_OpMod32u(v)
   427  	case OpMod64:
   428  		return rewriteValueLOONG64_OpMod64(v)
   429  	case OpMod64u:
   430  		v.Op = OpLOONG64REMVU
   431  		return true
   432  	case OpMod8:
   433  		return rewriteValueLOONG64_OpMod8(v)
   434  	case OpMod8u:
   435  		return rewriteValueLOONG64_OpMod8u(v)
   436  	case OpMove:
   437  		return rewriteValueLOONG64_OpMove(v)
   438  	case OpMul16:
   439  		v.Op = OpLOONG64MULV
   440  		return true
   441  	case OpMul32:
   442  		v.Op = OpLOONG64MULV
   443  		return true
   444  	case OpMul32F:
   445  		v.Op = OpLOONG64MULF
   446  		return true
   447  	case OpMul64:
   448  		v.Op = OpLOONG64MULV
   449  		return true
   450  	case OpMul64F:
   451  		v.Op = OpLOONG64MULD
   452  		return true
   453  	case OpMul8:
   454  		v.Op = OpLOONG64MULV
   455  		return true
   456  	case OpNeg16:
   457  		v.Op = OpLOONG64NEGV
   458  		return true
   459  	case OpNeg32:
   460  		v.Op = OpLOONG64NEGV
   461  		return true
   462  	case OpNeg32F:
   463  		v.Op = OpLOONG64NEGF
   464  		return true
   465  	case OpNeg64:
   466  		v.Op = OpLOONG64NEGV
   467  		return true
   468  	case OpNeg64F:
   469  		v.Op = OpLOONG64NEGD
   470  		return true
   471  	case OpNeg8:
   472  		v.Op = OpLOONG64NEGV
   473  		return true
   474  	case OpNeq16:
   475  		return rewriteValueLOONG64_OpNeq16(v)
   476  	case OpNeq32:
   477  		return rewriteValueLOONG64_OpNeq32(v)
   478  	case OpNeq32F:
   479  		return rewriteValueLOONG64_OpNeq32F(v)
   480  	case OpNeq64:
   481  		return rewriteValueLOONG64_OpNeq64(v)
   482  	case OpNeq64F:
   483  		return rewriteValueLOONG64_OpNeq64F(v)
   484  	case OpNeq8:
   485  		return rewriteValueLOONG64_OpNeq8(v)
   486  	case OpNeqB:
   487  		v.Op = OpLOONG64XOR
   488  		return true
   489  	case OpNeqPtr:
   490  		return rewriteValueLOONG64_OpNeqPtr(v)
   491  	case OpNilCheck:
   492  		v.Op = OpLOONG64LoweredNilCheck
   493  		return true
   494  	case OpNot:
   495  		return rewriteValueLOONG64_OpNot(v)
   496  	case OpOffPtr:
   497  		return rewriteValueLOONG64_OpOffPtr(v)
   498  	case OpOr16:
   499  		v.Op = OpLOONG64OR
   500  		return true
   501  	case OpOr32:
   502  		v.Op = OpLOONG64OR
   503  		return true
   504  	case OpOr64:
   505  		v.Op = OpLOONG64OR
   506  		return true
   507  	case OpOr8:
   508  		v.Op = OpLOONG64OR
   509  		return true
   510  	case OpOrB:
   511  		v.Op = OpLOONG64OR
   512  		return true
   513  	case OpPanicBounds:
   514  		return rewriteValueLOONG64_OpPanicBounds(v)
   515  	case OpRotateLeft16:
   516  		return rewriteValueLOONG64_OpRotateLeft16(v)
   517  	case OpRotateLeft32:
   518  		return rewriteValueLOONG64_OpRotateLeft32(v)
   519  	case OpRotateLeft64:
   520  		return rewriteValueLOONG64_OpRotateLeft64(v)
   521  	case OpRotateLeft8:
   522  		return rewriteValueLOONG64_OpRotateLeft8(v)
   523  	case OpRound32F:
   524  		v.Op = OpCopy
   525  		return true
   526  	case OpRound64F:
   527  		v.Op = OpCopy
   528  		return true
   529  	case OpRsh16Ux16:
   530  		return rewriteValueLOONG64_OpRsh16Ux16(v)
   531  	case OpRsh16Ux32:
   532  		return rewriteValueLOONG64_OpRsh16Ux32(v)
   533  	case OpRsh16Ux64:
   534  		return rewriteValueLOONG64_OpRsh16Ux64(v)
   535  	case OpRsh16Ux8:
   536  		return rewriteValueLOONG64_OpRsh16Ux8(v)
   537  	case OpRsh16x16:
   538  		return rewriteValueLOONG64_OpRsh16x16(v)
   539  	case OpRsh16x32:
   540  		return rewriteValueLOONG64_OpRsh16x32(v)
   541  	case OpRsh16x64:
   542  		return rewriteValueLOONG64_OpRsh16x64(v)
   543  	case OpRsh16x8:
   544  		return rewriteValueLOONG64_OpRsh16x8(v)
   545  	case OpRsh32Ux16:
   546  		return rewriteValueLOONG64_OpRsh32Ux16(v)
   547  	case OpRsh32Ux32:
   548  		return rewriteValueLOONG64_OpRsh32Ux32(v)
   549  	case OpRsh32Ux64:
   550  		return rewriteValueLOONG64_OpRsh32Ux64(v)
   551  	case OpRsh32Ux8:
   552  		return rewriteValueLOONG64_OpRsh32Ux8(v)
   553  	case OpRsh32x16:
   554  		return rewriteValueLOONG64_OpRsh32x16(v)
   555  	case OpRsh32x32:
   556  		return rewriteValueLOONG64_OpRsh32x32(v)
   557  	case OpRsh32x64:
   558  		return rewriteValueLOONG64_OpRsh32x64(v)
   559  	case OpRsh32x8:
   560  		return rewriteValueLOONG64_OpRsh32x8(v)
   561  	case OpRsh64Ux16:
   562  		return rewriteValueLOONG64_OpRsh64Ux16(v)
   563  	case OpRsh64Ux32:
   564  		return rewriteValueLOONG64_OpRsh64Ux32(v)
   565  	case OpRsh64Ux64:
   566  		return rewriteValueLOONG64_OpRsh64Ux64(v)
   567  	case OpRsh64Ux8:
   568  		return rewriteValueLOONG64_OpRsh64Ux8(v)
   569  	case OpRsh64x16:
   570  		return rewriteValueLOONG64_OpRsh64x16(v)
   571  	case OpRsh64x32:
   572  		return rewriteValueLOONG64_OpRsh64x32(v)
   573  	case OpRsh64x64:
   574  		return rewriteValueLOONG64_OpRsh64x64(v)
   575  	case OpRsh64x8:
   576  		return rewriteValueLOONG64_OpRsh64x8(v)
   577  	case OpRsh8Ux16:
   578  		return rewriteValueLOONG64_OpRsh8Ux16(v)
   579  	case OpRsh8Ux32:
   580  		return rewriteValueLOONG64_OpRsh8Ux32(v)
   581  	case OpRsh8Ux64:
   582  		return rewriteValueLOONG64_OpRsh8Ux64(v)
   583  	case OpRsh8Ux8:
   584  		return rewriteValueLOONG64_OpRsh8Ux8(v)
   585  	case OpRsh8x16:
   586  		return rewriteValueLOONG64_OpRsh8x16(v)
   587  	case OpRsh8x32:
   588  		return rewriteValueLOONG64_OpRsh8x32(v)
   589  	case OpRsh8x64:
   590  		return rewriteValueLOONG64_OpRsh8x64(v)
   591  	case OpRsh8x8:
   592  		return rewriteValueLOONG64_OpRsh8x8(v)
   593  	case OpSelect0:
   594  		return rewriteValueLOONG64_OpSelect0(v)
   595  	case OpSelect1:
   596  		return rewriteValueLOONG64_OpSelect1(v)
   597  	case OpSignExt16to32:
   598  		v.Op = OpLOONG64MOVHreg
   599  		return true
   600  	case OpSignExt16to64:
   601  		v.Op = OpLOONG64MOVHreg
   602  		return true
   603  	case OpSignExt32to64:
   604  		v.Op = OpLOONG64MOVWreg
   605  		return true
   606  	case OpSignExt8to16:
   607  		v.Op = OpLOONG64MOVBreg
   608  		return true
   609  	case OpSignExt8to32:
   610  		v.Op = OpLOONG64MOVBreg
   611  		return true
   612  	case OpSignExt8to64:
   613  		v.Op = OpLOONG64MOVBreg
   614  		return true
   615  	case OpSlicemask:
   616  		return rewriteValueLOONG64_OpSlicemask(v)
   617  	case OpSqrt:
   618  		v.Op = OpLOONG64SQRTD
   619  		return true
   620  	case OpSqrt32:
   621  		v.Op = OpLOONG64SQRTF
   622  		return true
   623  	case OpStaticCall:
   624  		v.Op = OpLOONG64CALLstatic
   625  		return true
   626  	case OpStore:
   627  		return rewriteValueLOONG64_OpStore(v)
   628  	case OpSub16:
   629  		v.Op = OpLOONG64SUBV
   630  		return true
   631  	case OpSub32:
   632  		v.Op = OpLOONG64SUBV
   633  		return true
   634  	case OpSub32F:
   635  		v.Op = OpLOONG64SUBF
   636  		return true
   637  	case OpSub64:
   638  		v.Op = OpLOONG64SUBV
   639  		return true
   640  	case OpSub64F:
   641  		v.Op = OpLOONG64SUBD
   642  		return true
   643  	case OpSub8:
   644  		v.Op = OpLOONG64SUBV
   645  		return true
   646  	case OpSubPtr:
   647  		v.Op = OpLOONG64SUBV
   648  		return true
   649  	case OpTailCall:
   650  		v.Op = OpLOONG64CALLtail
   651  		return true
   652  	case OpTrunc16to8:
   653  		v.Op = OpCopy
   654  		return true
   655  	case OpTrunc32to16:
   656  		v.Op = OpCopy
   657  		return true
   658  	case OpTrunc32to8:
   659  		v.Op = OpCopy
   660  		return true
   661  	case OpTrunc64to16:
   662  		v.Op = OpCopy
   663  		return true
   664  	case OpTrunc64to32:
   665  		v.Op = OpCopy
   666  		return true
   667  	case OpTrunc64to8:
   668  		v.Op = OpCopy
   669  		return true
   670  	case OpWB:
   671  		v.Op = OpLOONG64LoweredWB
   672  		return true
   673  	case OpXor16:
   674  		v.Op = OpLOONG64XOR
   675  		return true
   676  	case OpXor32:
   677  		v.Op = OpLOONG64XOR
   678  		return true
   679  	case OpXor64:
   680  		v.Op = OpLOONG64XOR
   681  		return true
   682  	case OpXor8:
   683  		v.Op = OpLOONG64XOR
   684  		return true
   685  	case OpZero:
   686  		return rewriteValueLOONG64_OpZero(v)
   687  	case OpZeroExt16to32:
   688  		v.Op = OpLOONG64MOVHUreg
   689  		return true
   690  	case OpZeroExt16to64:
   691  		v.Op = OpLOONG64MOVHUreg
   692  		return true
   693  	case OpZeroExt32to64:
   694  		v.Op = OpLOONG64MOVWUreg
   695  		return true
   696  	case OpZeroExt8to16:
   697  		v.Op = OpLOONG64MOVBUreg
   698  		return true
   699  	case OpZeroExt8to32:
   700  		v.Op = OpLOONG64MOVBUreg
   701  		return true
   702  	case OpZeroExt8to64:
   703  		v.Op = OpLOONG64MOVBUreg
   704  		return true
   705  	}
   706  	return false
   707  }
   708  func rewriteValueLOONG64_OpAddr(v *Value) bool {
   709  	v_0 := v.Args[0]
   710  	// match: (Addr {sym} base)
   711  	// result: (MOVVaddr {sym} base)
   712  	for {
   713  		sym := auxToSym(v.Aux)
   714  		base := v_0
   715  		v.reset(OpLOONG64MOVVaddr)
   716  		v.Aux = symToAux(sym)
   717  		v.AddArg(base)
   718  		return true
   719  	}
   720  }
   721  func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool {
   722  	v_3 := v.Args[3]
   723  	v_2 := v.Args[2]
   724  	v_1 := v.Args[1]
   725  	v_0 := v.Args[0]
   726  	b := v.Block
   727  	typ := &b.Func.Config.Types
   728  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   729  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   730  	for {
   731  		ptr := v_0
   732  		old := v_1
   733  		new := v_2
   734  		mem := v_3
   735  		v.reset(OpLOONG64LoweredAtomicCas32)
   736  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   737  		v0.AddArg(old)
   738  		v.AddArg4(ptr, v0, new, mem)
   739  		return true
   740  	}
   741  }
   742  func rewriteValueLOONG64_OpAvg64u(v *Value) bool {
   743  	v_1 := v.Args[1]
   744  	v_0 := v.Args[0]
   745  	b := v.Block
   746  	// match: (Avg64u <t> x y)
   747  	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
   748  	for {
   749  		t := v.Type
   750  		x := v_0
   751  		y := v_1
   752  		v.reset(OpLOONG64ADDV)
   753  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t)
   754  		v0.AuxInt = int64ToAuxInt(1)
   755  		v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
   756  		v1.AddArg2(x, y)
   757  		v0.AddArg(v1)
   758  		v.AddArg2(v0, y)
   759  		return true
   760  	}
   761  }
   762  func rewriteValueLOONG64_OpCom16(v *Value) bool {
   763  	v_0 := v.Args[0]
   764  	b := v.Block
   765  	typ := &b.Func.Config.Types
   766  	// match: (Com16 x)
   767  	// result: (NOR (MOVVconst [0]) x)
   768  	for {
   769  		x := v_0
   770  		v.reset(OpLOONG64NOR)
   771  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   772  		v0.AuxInt = int64ToAuxInt(0)
   773  		v.AddArg2(v0, x)
   774  		return true
   775  	}
   776  }
   777  func rewriteValueLOONG64_OpCom32(v *Value) bool {
   778  	v_0 := v.Args[0]
   779  	b := v.Block
   780  	typ := &b.Func.Config.Types
   781  	// match: (Com32 x)
   782  	// result: (NOR (MOVVconst [0]) x)
   783  	for {
   784  		x := v_0
   785  		v.reset(OpLOONG64NOR)
   786  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   787  		v0.AuxInt = int64ToAuxInt(0)
   788  		v.AddArg2(v0, x)
   789  		return true
   790  	}
   791  }
   792  func rewriteValueLOONG64_OpCom64(v *Value) bool {
   793  	v_0 := v.Args[0]
   794  	b := v.Block
   795  	typ := &b.Func.Config.Types
   796  	// match: (Com64 x)
   797  	// result: (NOR (MOVVconst [0]) x)
   798  	for {
   799  		x := v_0
   800  		v.reset(OpLOONG64NOR)
   801  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   802  		v0.AuxInt = int64ToAuxInt(0)
   803  		v.AddArg2(v0, x)
   804  		return true
   805  	}
   806  }
   807  func rewriteValueLOONG64_OpCom8(v *Value) bool {
   808  	v_0 := v.Args[0]
   809  	b := v.Block
   810  	typ := &b.Func.Config.Types
   811  	// match: (Com8 x)
   812  	// result: (NOR (MOVVconst [0]) x)
   813  	for {
   814  		x := v_0
   815  		v.reset(OpLOONG64NOR)
   816  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   817  		v0.AuxInt = int64ToAuxInt(0)
   818  		v.AddArg2(v0, x)
   819  		return true
   820  	}
   821  }
   822  func rewriteValueLOONG64_OpCondSelect(v *Value) bool {
   823  	v_2 := v.Args[2]
   824  	v_1 := v.Args[1]
   825  	v_0 := v.Args[0]
   826  	b := v.Block
   827  	// match: (CondSelect <t> x y cond)
   828  	// result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond))
   829  	for {
   830  		t := v.Type
   831  		x := v_0
   832  		y := v_1
   833  		cond := v_2
   834  		v.reset(OpLOONG64OR)
   835  		v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t)
   836  		v0.AddArg2(x, cond)
   837  		v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t)
   838  		v1.AddArg2(y, cond)
   839  		v.AddArg2(v0, v1)
   840  		return true
   841  	}
   842  }
   843  func rewriteValueLOONG64_OpConst16(v *Value) bool {
   844  	// match: (Const16 [val])
   845  	// result: (MOVVconst [int64(val)])
   846  	for {
   847  		val := auxIntToInt16(v.AuxInt)
   848  		v.reset(OpLOONG64MOVVconst)
   849  		v.AuxInt = int64ToAuxInt(int64(val))
   850  		return true
   851  	}
   852  }
   853  func rewriteValueLOONG64_OpConst32(v *Value) bool {
   854  	// match: (Const32 [val])
   855  	// result: (MOVVconst [int64(val)])
   856  	for {
   857  		val := auxIntToInt32(v.AuxInt)
   858  		v.reset(OpLOONG64MOVVconst)
   859  		v.AuxInt = int64ToAuxInt(int64(val))
   860  		return true
   861  	}
   862  }
   863  func rewriteValueLOONG64_OpConst32F(v *Value) bool {
   864  	// match: (Const32F [val])
   865  	// result: (MOVFconst [float64(val)])
   866  	for {
   867  		val := auxIntToFloat32(v.AuxInt)
   868  		v.reset(OpLOONG64MOVFconst)
   869  		v.AuxInt = float64ToAuxInt(float64(val))
   870  		return true
   871  	}
   872  }
   873  func rewriteValueLOONG64_OpConst64(v *Value) bool {
   874  	// match: (Const64 [val])
   875  	// result: (MOVVconst [int64(val)])
   876  	for {
   877  		val := auxIntToInt64(v.AuxInt)
   878  		v.reset(OpLOONG64MOVVconst)
   879  		v.AuxInt = int64ToAuxInt(int64(val))
   880  		return true
   881  	}
   882  }
   883  func rewriteValueLOONG64_OpConst64F(v *Value) bool {
   884  	// match: (Const64F [val])
   885  	// result: (MOVDconst [float64(val)])
   886  	for {
   887  		val := auxIntToFloat64(v.AuxInt)
   888  		v.reset(OpLOONG64MOVDconst)
   889  		v.AuxInt = float64ToAuxInt(float64(val))
   890  		return true
   891  	}
   892  }
   893  func rewriteValueLOONG64_OpConst8(v *Value) bool {
   894  	// match: (Const8 [val])
   895  	// result: (MOVVconst [int64(val)])
   896  	for {
   897  		val := auxIntToInt8(v.AuxInt)
   898  		v.reset(OpLOONG64MOVVconst)
   899  		v.AuxInt = int64ToAuxInt(int64(val))
   900  		return true
   901  	}
   902  }
   903  func rewriteValueLOONG64_OpConstBool(v *Value) bool {
   904  	// match: (ConstBool [t])
   905  	// result: (MOVVconst [int64(b2i(t))])
   906  	for {
   907  		t := auxIntToBool(v.AuxInt)
   908  		v.reset(OpLOONG64MOVVconst)
   909  		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
   910  		return true
   911  	}
   912  }
   913  func rewriteValueLOONG64_OpConstNil(v *Value) bool {
   914  	// match: (ConstNil)
   915  	// result: (MOVVconst [0])
   916  	for {
   917  		v.reset(OpLOONG64MOVVconst)
   918  		v.AuxInt = int64ToAuxInt(0)
   919  		return true
   920  	}
   921  }
   922  func rewriteValueLOONG64_OpDiv16(v *Value) bool {
   923  	v_1 := v.Args[1]
   924  	v_0 := v.Args[0]
   925  	b := v.Block
   926  	typ := &b.Func.Config.Types
   927  	// match: (Div16 x y)
   928  	// result: (DIVV (SignExt16to64 x) (SignExt16to64 y))
   929  	for {
   930  		x := v_0
   931  		y := v_1
   932  		v.reset(OpLOONG64DIVV)
   933  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
   934  		v0.AddArg(x)
   935  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
   936  		v1.AddArg(y)
   937  		v.AddArg2(v0, v1)
   938  		return true
   939  	}
   940  }
func rewriteValueLOONG64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Unsigned 16-bit divide: zero-extend both operands to 64 bits and use
	// the 64-bit unsigned divide instruction.
	// match: (Div16u x y)
	// result: (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Signed 32-bit divide: sign-extend both operands to 64 bits and use
	// the 64-bit divide instruction.
	// match: (Div32 x y)
	// result: (DIVV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Unsigned 32-bit divide: zero-extend both operands to 64 bits and use
	// the 64-bit unsigned divide instruction.
	// match: (Div32u x y)
	// result: (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// 64-bit signed divide maps directly onto the machine instruction.
	// match: (Div64 x y)
	// result: (DIVV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v.AddArg2(x, y)
		return true
	}
}
func rewriteValueLOONG64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Signed 8-bit divide: sign-extend both operands to 64 bits and use
	// the 64-bit divide instruction.
	// match: (Div8 x y)
	// result: (DIVV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Unsigned 8-bit divide: zero-extend both operands to 64 bits and use
	// the 64-bit unsigned divide instruction.
	// match: (Div8u x y)
	// result: (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// 16-bit equality: zero-extend, XOR the operands, then test the XOR
	// for zero via the unsigned compare 1 >u (x^y).
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// 32-bit equality: zero-extend, XOR the operands, then test the XOR
	// for zero via the unsigned compare 1 >u (x^y).
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// 32-bit float equality: floating-point compare sets the FP flag,
	// FPFlagTrue materializes it as a boolean.
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueLOONG64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// 64-bit equality: x == y iff (x^y) == 0, tested as 1 >u (x^y).
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// 64-bit float equality: floating-point compare sets the FP flag,
	// FPFlagTrue materializes it as a boolean.
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueLOONG64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// 8-bit equality: zero-extend, XOR the operands, then test the XOR
	// for zero via the unsigned compare 1 >u (x^y).
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Boolean equality: booleans are 0/1, so x == y is 1 ^ (x ^ y).
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Pointer equality: same form as Eq64 — x == y iff 1 >u (x^y).
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Signed 32-bit high multiply: compute the full 64-bit product of the
	// sign-extended operands, then arithmetic-shift the top half down.
	// match: (Hmul32 x y)
	// result: (SRAVconst (MULV (SignExt32to64 x) (SignExt32to64 y)) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueLOONG64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Unsigned 32-bit high multiply: compute the full 64-bit product of the
	// zero-extended operands, then logical-shift the top half down.
	// match: (Hmul32u x y)
	// result: (SRLVconst (MULV (ZeroExt32to64 x) (ZeroExt32to64 y)) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueLOONG64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Bounds check idx < len as the unsigned compare len >u idx
	// (unsigned also rejects negative idx reinterpreted as huge values).
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
func rewriteValueLOONG64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Non-nil test: ptr != 0 expressed as the unsigned compare ptr >u 0.
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Slice bounds check idx <= len expressed as !(idx >u len),
	// i.e. 1 XOR (SGTU idx len).
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Fold a small constant operand into the immediate form ADDVconst.
	// The pointer-typed constant is excluded so address arithmetic keeps
	// its shape for later addressing-mode folding.
	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)
	for {
		// ADDV is commutative: try both argument orders by swapping v_0/v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c) && !t.IsPtr()) {
				continue
			}
			v.reset(OpLOONG64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// Adding a negation is a subtraction.
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// Fold the constant into a symbolic address's offset.
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// Adding zero is the identity.
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// Constant-fold an add of two constants.
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// Combine two consecutive constant adds; guarded so the merged
	// immediate still fits in 32 bits.
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// Combine a constant add with a constant subtract.
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Fold a small constant operand into the immediate form ANDconst.
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		// AND is commutative: try both argument orders by swapping v_0/v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// x & x is the identity.
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// Masking with zero yields zero regardless of the operand.
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Masking with all ones is the identity.
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// Constant-fold an AND of two constants.
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// Collapse two consecutive masks into one.
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64DIVV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Constant-fold a signed divide; d != 0 guards against a compile-time
	// divide-by-zero (the runtime case must keep the instruction to trap).
	// match: (DIVV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64DIVVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Unsigned divide by one is the identity.
	// match: (DIVVU x (MOVVconst [1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// Strength-reduce unsigned divide by a power of two to a logical
	// right shift.
	// match: (DIVVU x (MOVVconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SRLVconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// Constant-fold an unsigned divide; d != 0 guards divide-by-zero.
	// match: (DIVVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Use the immediate-addend form when the atomic add's delta is a
	// constant that fits in 32 bits.
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64LoweredAtomicAddconst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Use the immediate-addend form when the atomic add's delta is a
	// constant that fits in 32 bits.
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64LoweredAtomicAddconst64)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64LoweredAtomicStore32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Storing constant zero atomically uses the dedicated store-zero form.
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicStorezero32)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64LoweredAtomicStore64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// Storing constant zero atomically uses the dedicated store-zero form.
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicStorezero64)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// MASKEQZ x cond yields x when cond != 0 and 0 otherwise; the rules
	// below resolve it when either operand is a known constant.
	// A zero value operand makes the result zero either way.
	// match: (MASKEQZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A constant-zero condition selects zero.
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c == 0
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c == 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A constant non-zero condition selects x unchanged.
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c != 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c != 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// MASKNEZ selects between its value operand and zero; a zero value
	// operand makes the result zero regardless of the condition.
	// match: (MASKNEZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// Fold a constant pointer adjustment into the load's offset.
	// The OpSB/dynlink guard avoids folding global addresses that must
	// stay relocatable under dynamic linking.
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// Merge a symbolic address (symbol + offset) into the load itself.
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// MOVBUreg zero-extends a byte; the rules below drop the extension
	// when the operand is already known to fit in an unsigned byte.
	// Comparison results are 0/1, so extension is a no-op.
	// match: (MOVBUreg x:(SGT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SGTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// An unsigned byte load already produced a zero-extended value;
	// a register move suffices.
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// A repeated zero-extension is redundant.
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// Constant-fold: truncate the constant to its low unsigned byte.
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// Fold a constant pointer adjustment into the load's offset.
	// The OpSB/dynlink guard avoids folding global addresses that must
	// stay relocatable under dynamic linking.
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// Merge a symbolic address (symbol + offset) into the load itself.
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// MOVBreg sign-extends a byte; the rules below drop the extension
	// when the operand is already sign-extended.
	// A signed byte load already produced a sign-extended value;
	// a register move suffices.
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// A repeated sign-extension is redundant.
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// Constant-fold: truncate the constant to its low signed byte.
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	return false
}
  1918  func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
  1919  	v_2 := v.Args[2]
  1920  	v_1 := v.Args[1]
  1921  	v_0 := v.Args[0]
  1922  	b := v.Block
  1923  	config := b.Func.Config
  1924  	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  1925  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  1926  	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
  1927  	for {
  1928  		off1 := auxIntToInt32(v.AuxInt)
  1929  		sym := auxToSym(v.Aux)
  1930  		if v_0.Op != OpLOONG64ADDVconst {
  1931  			break
  1932  		}
  1933  		off2 := auxIntToInt64(v_0.AuxInt)
  1934  		ptr := v_0.Args[0]
  1935  		val := v_1
  1936  		mem := v_2
  1937  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  1938  			break
  1939  		}
  1940  		v.reset(OpLOONG64MOVBstore)
  1941  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  1942  		v.Aux = symToAux(sym)
  1943  		v.AddArg3(ptr, val, mem)
  1944  		return true
  1945  	}
  1946  	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  1947  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  1948  	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  1949  	for {
  1950  		off1 := auxIntToInt32(v.AuxInt)
  1951  		sym1 := auxToSym(v.Aux)
  1952  		if v_0.Op != OpLOONG64MOVVaddr {
  1953  			break
  1954  		}
  1955  		off2 := auxIntToInt32(v_0.AuxInt)
  1956  		sym2 := auxToSym(v_0.Aux)
  1957  		ptr := v_0.Args[0]
  1958  		val := v_1
  1959  		mem := v_2
  1960  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  1961  			break
  1962  		}
  1963  		v.reset(OpLOONG64MOVBstore)
  1964  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  1965  		v.Aux = symToAux(mergeSym(sym1, sym2))
  1966  		v.AddArg3(ptr, val, mem)
  1967  		return true
  1968  	}
  1969  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  1970  	// result: (MOVBstore [off] {sym} ptr x mem)
  1971  	for {
  1972  		off := auxIntToInt32(v.AuxInt)
  1973  		sym := auxToSym(v.Aux)
  1974  		ptr := v_0
  1975  		if v_1.Op != OpLOONG64MOVBreg {
  1976  			break
  1977  		}
  1978  		x := v_1.Args[0]
  1979  		mem := v_2
  1980  		v.reset(OpLOONG64MOVBstore)
  1981  		v.AuxInt = int32ToAuxInt(off)
  1982  		v.Aux = symToAux(sym)
  1983  		v.AddArg3(ptr, x, mem)
  1984  		return true
  1985  	}
  1986  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  1987  	// result: (MOVBstore [off] {sym} ptr x mem)
  1988  	for {
  1989  		off := auxIntToInt32(v.AuxInt)
  1990  		sym := auxToSym(v.Aux)
  1991  		ptr := v_0
  1992  		if v_1.Op != OpLOONG64MOVBUreg {
  1993  			break
  1994  		}
  1995  		x := v_1.Args[0]
  1996  		mem := v_2
  1997  		v.reset(OpLOONG64MOVBstore)
  1998  		v.AuxInt = int32ToAuxInt(off)
  1999  		v.Aux = symToAux(sym)
  2000  		v.AddArg3(ptr, x, mem)
  2001  		return true
  2002  	}
  2003  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  2004  	// result: (MOVBstore [off] {sym} ptr x mem)
  2005  	for {
  2006  		off := auxIntToInt32(v.AuxInt)
  2007  		sym := auxToSym(v.Aux)
  2008  		ptr := v_0
  2009  		if v_1.Op != OpLOONG64MOVHreg {
  2010  			break
  2011  		}
  2012  		x := v_1.Args[0]
  2013  		mem := v_2
  2014  		v.reset(OpLOONG64MOVBstore)
  2015  		v.AuxInt = int32ToAuxInt(off)
  2016  		v.Aux = symToAux(sym)
  2017  		v.AddArg3(ptr, x, mem)
  2018  		return true
  2019  	}
  2020  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  2021  	// result: (MOVBstore [off] {sym} ptr x mem)
  2022  	for {
  2023  		off := auxIntToInt32(v.AuxInt)
  2024  		sym := auxToSym(v.Aux)
  2025  		ptr := v_0
  2026  		if v_1.Op != OpLOONG64MOVHUreg {
  2027  			break
  2028  		}
  2029  		x := v_1.Args[0]
  2030  		mem := v_2
  2031  		v.reset(OpLOONG64MOVBstore)
  2032  		v.AuxInt = int32ToAuxInt(off)
  2033  		v.Aux = symToAux(sym)
  2034  		v.AddArg3(ptr, x, mem)
  2035  		return true
  2036  	}
  2037  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  2038  	// result: (MOVBstore [off] {sym} ptr x mem)
  2039  	for {
  2040  		off := auxIntToInt32(v.AuxInt)
  2041  		sym := auxToSym(v.Aux)
  2042  		ptr := v_0
  2043  		if v_1.Op != OpLOONG64MOVWreg {
  2044  			break
  2045  		}
  2046  		x := v_1.Args[0]
  2047  		mem := v_2
  2048  		v.reset(OpLOONG64MOVBstore)
  2049  		v.AuxInt = int32ToAuxInt(off)
  2050  		v.Aux = symToAux(sym)
  2051  		v.AddArg3(ptr, x, mem)
  2052  		return true
  2053  	}
  2054  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  2055  	// result: (MOVBstore [off] {sym} ptr x mem)
  2056  	for {
  2057  		off := auxIntToInt32(v.AuxInt)
  2058  		sym := auxToSym(v.Aux)
  2059  		ptr := v_0
  2060  		if v_1.Op != OpLOONG64MOVWUreg {
  2061  			break
  2062  		}
  2063  		x := v_1.Args[0]
  2064  		mem := v_2
  2065  		v.reset(OpLOONG64MOVBstore)
  2066  		v.AuxInt = int32ToAuxInt(off)
  2067  		v.Aux = symToAux(sym)
  2068  		v.AddArg3(ptr, x, mem)
  2069  		return true
  2070  	}
  2071  	return false
  2072  }
// rewriteValueLOONG64_OpLOONG64MOVBstorezero applies the generated rewrite
// rules for the LOONG64 MOVBstorezero op: it folds an ADDVconst offset, or a
// MOVVaddr symbol+offset, from the address operand into the store's own
// AuxInt/Aux, provided the combined offset still fits in 32 bits and the
// address is not SB-relative under dynamic linking. Rules are tried in order;
// the first match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDload applies the generated rewrite rules
// for the LOONG64 MOVDload op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the load's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstore applies the generated rewrite rules
// for the LOONG64 MOVDstore op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the store's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFload applies the generated rewrite rules
// for the LOONG64 MOVFload op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the load's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstore applies the generated rewrite rules
// for the LOONG64 MOVFstore op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the store's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUload applies the generated rewrite rules
// for the LOONG64 MOVHUload op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the load's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUreg applies the generated rewrite rules
// for the LOONG64 MOVHUreg (zero-extend halfword) op: when the operand is
// already zero-extended to at most 16 bits (an unsigned narrow load or a
// narrower/equal zero-extension), the extension is replaced by a plain
// MOVVreg register move; a constant operand is folded to its low 16 bits.
// Rules are tried in order; the first match rewrites v in place. Reports
// whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHload applies the generated rewrite rules
// for the LOONG64 MOVHload op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the load's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHreg applies the generated rewrite rules
// for the LOONG64 MOVHreg (sign-extend halfword) op: when the operand is
// already extended to at most 16 significant bits (a narrow signed or
// unsigned load, or a narrower/equal extension whose result fits in int16),
// the extension is replaced by a plain MOVVreg register move; a constant
// operand is folded to int64(int16(c)). Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstore applies the generated rewrite rules
// for the LOONG64 MOVHstore op. The first two rules fold an ADDVconst offset
// or a MOVVaddr symbol+offset from the address operand into the store's own
// AuxInt/Aux (subject to 32-bit offset and SB/dynlink restrictions). The
// remaining rules drop a MOVHreg/MOVHUreg/MOVWreg/MOVWUreg extension on the
// stored value, since a halfword store writes only the low 16 bits and the
// extension cannot affect them. Rules are tried in order; the first match
// rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstorezero applies the generated rewrite
// rules for the LOONG64 MOVHstorezero op: it folds an ADDVconst offset, or a
// MOVVaddr symbol+offset, from the address operand into the store's own
// AuxInt/Aux, provided the combined offset still fits in 32 bits and the
// address is not SB-relative under dynamic linking. Rules are tried in order;
// the first match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVload applies the generated rewrite rules
// for the LOONG64 MOVVload op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the load's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVreg applies the generated rewrite rules
// for the LOONG64 MOVVreg (64-bit register move) op: a single-use operand is
// converted to a MOVVnop (no instruction emitted), and a MOVVconst operand is
// folded so the move becomes the constant itself. Rules are tried in order;
// the first match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpLOONG64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstore applies the generated rewrite rules
// for the LOONG64 MOVVstore op: it folds an ADDVconst offset, or a MOVVaddr
// symbol+offset, from the address operand into the store's own AuxInt/Aux,
// provided the combined offset still fits in 32 bits and the address is not
// SB-relative under dynamic linking. Rules are tried in order; the first
// match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstorezero applies the generated rewrite
// rules for the LOONG64 MOVVstorezero op: it folds an ADDVconst offset, or a
// MOVVaddr symbol+offset, from the address operand into the store's own
// AuxInt/Aux, provided the combined offset still fits in 32 bits and the
// address is not SB-relative under dynamic linking. Rules are tried in order;
// the first match rewrites v in place. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUload folds address arithmetic into a
// MOVWUload: a constant offset from an ADDVconst base, or an offset plus
// symbol from a MOVVaddr base, is merged into the load's AuxInt/Aux.
// Rules are tried in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Refuse when the merged offset leaves int32 range, or the base is SB
		// under dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUreg simplifies a 32-bit zero-extension.
// When the operand is already zero-extended to at most 32 bits (an unsigned
// narrow load, or a narrower/equal unsigned register extension), the
// extension is replaced by a plain MOVVreg copy; a MOVVconst operand is
// folded to its low 32 bits. Rules are tried in source order; it reports
// whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		// Constant fold: keep only the low 32 bits, zero-extended.
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWload folds address arithmetic into a
// MOVWload: a constant offset from an ADDVconst base, or an offset plus
// symbol from a MOVVaddr base, is merged into the load's AuxInt/Aux.
// Rules are tried in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Refuse when the merged offset leaves int32 range, or the base is SB
		// under dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWreg simplifies a 32-bit sign-extension.
// When the operand's value already fits in the signed 32-bit range (a narrow
// load, or a narrower/equal register extension), the extension is replaced by
// a plain MOVVreg copy; a MOVVconst operand is folded to int64(int32(c)).
// Rules are tried in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		// Constant fold: sign-extend the low 32 bits of c.
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstore simplifies a 32-bit store: it folds
// address arithmetic (ADDVconst offsets, MOVVaddr offset+symbol) into the
// store's AuxInt/Aux, and drops a MOVWreg/MOVWUreg extension of the stored
// value (the 32-bit store uses only the low 32 bits of val, so the extension
// is redundant). Rules are tried in source order; it reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		// Refuse when the merged offset leaves int32 range, or the base is SB
		// under dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstorezero folds address arithmetic into a
// MOVWstorezero: a constant offset from an ADDVconst base, or an offset plus
// symbol from a MOVVaddr base, is merged into the store's AuxInt/Aux.
// Rules are tried in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Refuse when the merged offset leaves int32 range, or the base is SB
		// under dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MULV constant-folds and strength-reduces a
// 64-bit multiply: x*-1 => NEGV x, x*0 => 0, x*1 => x, x*2^k => SLLVconst,
// and const*const is folded. MULV is commutative, so each rule's inner loop
// tries both argument orders by swapping v_0 and v_1 on the second pass.
// Rules are tried in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MULV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MULV x (MOVVconst [-1]))
	// result: (NEGV x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpLOONG64NEGV)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULV _ (MOVVconst [0]))
	// result: (MOVVconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SLLVconst [log64(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			// Multiply by a power of two becomes a left shift by log2(c).
			v.reset(OpLOONG64SLLVconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULV (MOVVconst [c]) (MOVVconst [d]))
	// result: (MOVVconst [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	return false
}
  3435  func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool {
  3436  	v_0 := v.Args[0]
  3437  	// match: (NEGV (MOVVconst [c]))
  3438  	// result: (MOVVconst [-c])
  3439  	for {
  3440  		if v_0.Op != OpLOONG64MOVVconst {
  3441  			break
  3442  		}
  3443  		c := auxIntToInt64(v_0.AuxInt)
  3444  		v.reset(OpLOONG64MOVVconst)
  3445  		v.AuxInt = int64ToAuxInt(-c)
  3446  		return true
  3447  	}
  3448  	return false
  3449  }
// rewriteValueLOONG64_OpLOONG64NOR canonicalizes NOR with one constant
// operand: when the constant fits in 32 bits it becomes a NORconst. NOR is
// commutative, so the inner loop tries both argument orders by swapping v_0
// and v_1 on the second pass. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
  3475  func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool {
  3476  	v_0 := v.Args[0]
  3477  	// match: (NORconst [c] (MOVVconst [d]))
  3478  	// result: (MOVVconst [^(c|d)])
  3479  	for {
  3480  		c := auxIntToInt64(v.AuxInt)
  3481  		if v_0.Op != OpLOONG64MOVVconst {
  3482  			break
  3483  		}
  3484  		d := auxIntToInt64(v_0.AuxInt)
  3485  		v.reset(OpLOONG64MOVVconst)
  3486  		v.AuxInt = int64ToAuxInt(^(c | d))
  3487  		return true
  3488  	}
  3489  	return false
  3490  }
// rewriteValueLOONG64_OpLOONG64OR simplifies a bitwise OR: a constant operand
// that fits in 32 bits becomes an ORconst (OR is commutative, so the inner
// loop tries both argument orders), and OR x x reduces to x. Rules are tried
// in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORconst simplifies ORconst: OR with 0 is the
// identity, OR with -1 absorbs to -1, OR of a constant is folded, and nested
// ORconsts collapse into one when the combined constant still fits in 32
// bits. Rules are tried in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpLOONG64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
  3580  func rewriteValueLOONG64_OpLOONG64REMV(v *Value) bool {
  3581  	v_1 := v.Args[1]
  3582  	v_0 := v.Args[0]
  3583  	// match: (REMV (MOVVconst [c]) (MOVVconst [d]))
  3584  	// cond: d != 0
  3585  	// result: (MOVVconst [c%d])
  3586  	for {
  3587  		if v_0.Op != OpLOONG64MOVVconst {
  3588  			break
  3589  		}
  3590  		c := auxIntToInt64(v_0.AuxInt)
  3591  		if v_1.Op != OpLOONG64MOVVconst {
  3592  			break
  3593  		}
  3594  		d := auxIntToInt64(v_1.AuxInt)
  3595  		if !(d != 0) {
  3596  			break
  3597  		}
  3598  		v.reset(OpLOONG64MOVVconst)
  3599  		v.AuxInt = int64ToAuxInt(c % d)
  3600  		return true
  3601  	}
  3602  	return false
  3603  }
// rewriteValueLOONG64_OpLOONG64REMVU simplifies an unsigned 64-bit remainder:
// x%1 => 0, x%2^k => ANDconst with mask 2^k-1, and const%const is folded with
// unsigned semantics when the divisor is nonzero. Rules are tried in source
// order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64REMVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMVU _ (MOVVconst [1]))
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (REMVU x (MOVVconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		// Unsigned remainder by a power of two is a bit mask.
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (REMVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
  3655  func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool {
  3656  	v_1 := v.Args[1]
  3657  	v_0 := v.Args[0]
  3658  	// match: (ROTR x (MOVVconst [c]))
  3659  	// result: (ROTRconst x [c&31])
  3660  	for {
  3661  		x := v_0
  3662  		if v_1.Op != OpLOONG64MOVVconst {
  3663  			break
  3664  		}
  3665  		c := auxIntToInt64(v_1.AuxInt)
  3666  		v.reset(OpLOONG64ROTRconst)
  3667  		v.AuxInt = int64ToAuxInt(c & 31)
  3668  		v.AddArg(x)
  3669  		return true
  3670  	}
  3671  	return false
  3672  }
  3673  func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool {
  3674  	v_1 := v.Args[1]
  3675  	v_0 := v.Args[0]
  3676  	// match: (ROTRV x (MOVVconst [c]))
  3677  	// result: (ROTRVconst x [c&63])
  3678  	for {
  3679  		x := v_0
  3680  		if v_1.Op != OpLOONG64MOVVconst {
  3681  			break
  3682  		}
  3683  		c := auxIntToInt64(v_1.AuxInt)
  3684  		v.reset(OpLOONG64ROTRVconst)
  3685  		v.AuxInt = int64ToAuxInt(c & 63)
  3686  		v.AddArg(x)
  3687  		return true
  3688  	}
  3689  	return false
  3690  }
  3691  func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool {
  3692  	v_1 := v.Args[1]
  3693  	v_0 := v.Args[0]
  3694  	// match: (SGT (MOVVconst [c]) x)
  3695  	// cond: is32Bit(c)
  3696  	// result: (SGTconst [c] x)
  3697  	for {
  3698  		if v_0.Op != OpLOONG64MOVVconst {
  3699  			break
  3700  		}
  3701  		c := auxIntToInt64(v_0.AuxInt)
  3702  		x := v_1
  3703  		if !(is32Bit(c)) {
  3704  			break
  3705  		}
  3706  		v.reset(OpLOONG64SGTconst)
  3707  		v.AuxInt = int64ToAuxInt(c)
  3708  		v.AddArg(x)
  3709  		return true
  3710  	}
  3711  	// match: (SGT x x)
  3712  	// result: (MOVVconst [0])
  3713  	for {
  3714  		x := v_0
  3715  		if x != v_1 {
  3716  			break
  3717  		}
  3718  		v.reset(OpLOONG64MOVVconst)
  3719  		v.AuxInt = int64ToAuxInt(0)
  3720  		return true
  3721  	}
  3722  	return false
  3723  }
  3724  func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool {
  3725  	v_1 := v.Args[1]
  3726  	v_0 := v.Args[0]
  3727  	// match: (SGTU (MOVVconst [c]) x)
  3728  	// cond: is32Bit(c)
  3729  	// result: (SGTUconst [c] x)
  3730  	for {
  3731  		if v_0.Op != OpLOONG64MOVVconst {
  3732  			break
  3733  		}
  3734  		c := auxIntToInt64(v_0.AuxInt)
  3735  		x := v_1
  3736  		if !(is32Bit(c)) {
  3737  			break
  3738  		}
  3739  		v.reset(OpLOONG64SGTUconst)
  3740  		v.AuxInt = int64ToAuxInt(c)
  3741  		v.AddArg(x)
  3742  		return true
  3743  	}
  3744  	// match: (SGTU x x)
  3745  	// result: (MOVVconst [0])
  3746  	for {
  3747  		x := v_0
  3748  		if x != v_1 {
  3749  			break
  3750  		}
  3751  		v.reset(OpLOONG64MOVVconst)
  3752  		v.AuxInt = int64ToAuxInt(0)
  3753  		return true
  3754  	}
  3755  	return false
  3756  }
// rewriteValueLOONG64_OpLOONG64SGTUconst statically evaluates an unsigned
// "c > operand" comparison where possible: against a constant operand, or
// against operands whose unsigned value has a known upper bound (zero-extended
// bytes/halfwords, AND-masked values, right-shifted values). Rules are tried
// in source order; it reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		// A zero-extended byte is at most 0xff, so any larger c compares greater.
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		// A zero-extended halfword is at most 0xffff.
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		// The AND mask bounds the operand's value from above.
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		// A logical right shift by d bounds the result by 2^(64-d)-1.
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
  3849  func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool {
  3850  	v_0 := v.Args[0]
  3851  	// match: (SGTconst [c] (MOVVconst [d]))
  3852  	// cond: c>d
  3853  	// result: (MOVVconst [1])
  3854  	for {
  3855  		c := auxIntToInt64(v.AuxInt)
  3856  		if v_0.Op != OpLOONG64MOVVconst {
  3857  			break
  3858  		}
  3859  		d := auxIntToInt64(v_0.AuxInt)
  3860  		if !(c > d) {
  3861  			break
  3862  		}
  3863  		v.reset(OpLOONG64MOVVconst)
  3864  		v.AuxInt = int64ToAuxInt(1)
  3865  		return true
  3866  	}
  3867  	// match: (SGTconst [c] (MOVVconst [d]))
  3868  	// cond: c<=d
  3869  	// result: (MOVVconst [0])
  3870  	for {
  3871  		c := auxIntToInt64(v.AuxInt)
  3872  		if v_0.Op != OpLOONG64MOVVconst {
  3873  			break
  3874  		}
  3875  		d := auxIntToInt64(v_0.AuxInt)
  3876  		if !(c <= d) {
  3877  			break
  3878  		}
  3879  		v.reset(OpLOONG64MOVVconst)
  3880  		v.AuxInt = int64ToAuxInt(0)
  3881  		return true
  3882  	}
  3883  	// match: (SGTconst [c] (MOVBreg _))
  3884  	// cond: 0x7f < c
  3885  	// result: (MOVVconst [1])
  3886  	for {
  3887  		c := auxIntToInt64(v.AuxInt)
  3888  		if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) {
  3889  			break
  3890  		}
  3891  		v.reset(OpLOONG64MOVVconst)
  3892  		v.AuxInt = int64ToAuxInt(1)
  3893  		return true
  3894  	}
  3895  	// match: (SGTconst [c] (MOVBreg _))
  3896  	// cond: c <= -0x80
  3897  	// result: (MOVVconst [0])
  3898  	for {
  3899  		c := auxIntToInt64(v.AuxInt)
  3900  		if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) {
  3901  			break
  3902  		}
  3903  		v.reset(OpLOONG64MOVVconst)
  3904  		v.AuxInt = int64ToAuxInt(0)
  3905  		return true
  3906  	}
  3907  	// match: (SGTconst [c] (MOVBUreg _))
  3908  	// cond: 0xff < c
  3909  	// result: (MOVVconst [1])
  3910  	for {
  3911  		c := auxIntToInt64(v.AuxInt)
  3912  		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) {
  3913  			break
  3914  		}
  3915  		v.reset(OpLOONG64MOVVconst)
  3916  		v.AuxInt = int64ToAuxInt(1)
  3917  		return true
  3918  	}
  3919  	// match: (SGTconst [c] (MOVBUreg _))
  3920  	// cond: c < 0
  3921  	// result: (MOVVconst [0])
  3922  	for {
  3923  		c := auxIntToInt64(v.AuxInt)
  3924  		if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) {
  3925  			break
  3926  		}
  3927  		v.reset(OpLOONG64MOVVconst)
  3928  		v.AuxInt = int64ToAuxInt(0)
  3929  		return true
  3930  	}
  3931  	// match: (SGTconst [c] (MOVHreg _))
  3932  	// cond: 0x7fff < c
  3933  	// result: (MOVVconst [1])
  3934  	for {
  3935  		c := auxIntToInt64(v.AuxInt)
  3936  		if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) {
  3937  			break
  3938  		}
  3939  		v.reset(OpLOONG64MOVVconst)
  3940  		v.AuxInt = int64ToAuxInt(1)
  3941  		return true
  3942  	}
  3943  	// match: (SGTconst [c] (MOVHreg _))
  3944  	// cond: c <= -0x8000
  3945  	// result: (MOVVconst [0])
  3946  	for {
  3947  		c := auxIntToInt64(v.AuxInt)
  3948  		if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) {
  3949  			break
  3950  		}
  3951  		v.reset(OpLOONG64MOVVconst)
  3952  		v.AuxInt = int64ToAuxInt(0)
  3953  		return true
  3954  	}
  3955  	// match: (SGTconst [c] (MOVHUreg _))
  3956  	// cond: 0xffff < c
  3957  	// result: (MOVVconst [1])
  3958  	for {
  3959  		c := auxIntToInt64(v.AuxInt)
  3960  		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) {
  3961  			break
  3962  		}
  3963  		v.reset(OpLOONG64MOVVconst)
  3964  		v.AuxInt = int64ToAuxInt(1)
  3965  		return true
  3966  	}
  3967  	// match: (SGTconst [c] (MOVHUreg _))
  3968  	// cond: c < 0
  3969  	// result: (MOVVconst [0])
  3970  	for {
  3971  		c := auxIntToInt64(v.AuxInt)
  3972  		if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) {
  3973  			break
  3974  		}
  3975  		v.reset(OpLOONG64MOVVconst)
  3976  		v.AuxInt = int64ToAuxInt(0)
  3977  		return true
  3978  	}
  3979  	// match: (SGTconst [c] (MOVWUreg _))
  3980  	// cond: c < 0
  3981  	// result: (MOVVconst [0])
  3982  	for {
  3983  		c := auxIntToInt64(v.AuxInt)
  3984  		if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) {
  3985  			break
  3986  		}
  3987  		v.reset(OpLOONG64MOVVconst)
  3988  		v.AuxInt = int64ToAuxInt(0)
  3989  		return true
  3990  	}
  3991  	// match: (SGTconst [c] (ANDconst [m] _))
  3992  	// cond: 0 <= m && m < c
  3993  	// result: (MOVVconst [1])
  3994  	for {
  3995  		c := auxIntToInt64(v.AuxInt)
  3996  		if v_0.Op != OpLOONG64ANDconst {
  3997  			break
  3998  		}
  3999  		m := auxIntToInt64(v_0.AuxInt)
  4000  		if !(0 <= m && m < c) {
  4001  			break
  4002  		}
  4003  		v.reset(OpLOONG64MOVVconst)
  4004  		v.AuxInt = int64ToAuxInt(1)
  4005  		return true
  4006  	}
  4007  	// match: (SGTconst [c] (SRLVconst _ [d]))
  4008  	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
  4009  	// result: (MOVVconst [1])
  4010  	for {
  4011  		c := auxIntToInt64(v.AuxInt)
  4012  		if v_0.Op != OpLOONG64SRLVconst {
  4013  			break
  4014  		}
  4015  		d := auxIntToInt64(v_0.AuxInt)
  4016  		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
  4017  			break
  4018  		}
  4019  		v.reset(OpLOONG64MOVVconst)
  4020  		v.AuxInt = int64ToAuxInt(1)
  4021  		return true
  4022  	}
  4023  	return false
  4024  }
// rewriteValueLOONG64_OpLOONG64SLLV rewrites SLLV (64-bit shift left logical):
// a constant shift amount >= 64 folds the result to constant 0; any other
// constant shift amount is strength-reduced to SLLVconst.
func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLVconst constant-folds SLLVconst of a
// MOVVconst operand into a single MOVVconst [d<<uint64(c)].
func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAV rewrites SRAV (64-bit shift right
// arithmetic): a constant shift amount >= 64 is clamped to SRAVconst [63]
// (all result bits become the sign bit); any other constant shift amount
// becomes SRAVconst.
func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAVconst constant-folds SRAVconst of a
// MOVVconst operand; d>>uint64(c) on the int64 d is an arithmetic shift.
func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLV rewrites SRLV (64-bit shift right
// logical): a constant shift amount >= 64 folds the result to constant 0;
// any other constant shift amount is strength-reduced to SRLVconst.
func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLVconst constant-folds SRLVconst of a
// MOVVconst operand; the uint64 conversion of d makes the shift logical
// (zero-filling) rather than arithmetic.
func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBV rewrites 64-bit subtraction:
// subtracting a 32-bit-representable constant becomes SUBVconst,
// x-x folds to constant 0, and 0-x becomes NEGV.
func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpLOONG64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBVconst simplifies SUBVconst: subtracting 0
// is the identity, a MOVVconst operand folds to a constant, and a nested
// SUBVconst/ADDVconst is merged into one ADDVconst when the combined offset
// still fits in 32 bits.
func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XOR rewrites 64-bit XOR: a
// 32-bit-representable constant operand (tried in both argument orders,
// since XOR is commutative) becomes XORconst, and x^x folds to constant 0.
func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		// Try both operand orders: the _i0 loop swaps v_0/v_1 on its second pass.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XORconst simplifies XORconst: xor with 0 is
// the identity, xor with -1 becomes NORconst [0] (bitwise NOT), a MOVVconst
// operand constant-folds, and nested XORconsts merge when the combined
// constant fits in 32 bits.
func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpLOONG64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLeq16 lowers signed 16-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGT (SignExt16to64 x) (SignExt16to64 y)).
func rewriteValueLOONG64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq16U lowers unsigned 16-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)).
func rewriteValueLOONG64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32 lowers signed 32-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGT (SignExt32to64 x) (SignExt32to64 y)).
func rewriteValueLOONG64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32F lowers float32 x<=y as
// FPFlagTrue(CMPGEF y x), i.e. y>=x with operands swapped.
func rewriteValueLOONG64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32U lowers unsigned 32-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)).
func rewriteValueLOONG64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64 lowers signed 64-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGT x y); no extension is needed at full width.
func rewriteValueLOONG64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64F lowers float64 x<=y as
// FPFlagTrue(CMPGED y x), i.e. y>=x with operands swapped.
func rewriteValueLOONG64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64U lowers unsigned 64-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGTU x y); no extension is needed at full width.
func rewriteValueLOONG64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8 lowers signed 8-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGT (SignExt8to64 x) (SignExt8to64 y)).
func rewriteValueLOONG64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8U lowers unsigned 8-bit x<=y as the negation of
// x>y: XOR of constant 1 with (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)).
func rewriteValueLOONG64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16 lowers signed 16-bit x<y as
// (SGT (SignExt16to64 y) (SignExt16to64 x)), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16U lowers unsigned 16-bit x<y as
// (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32 lowers signed 32-bit x<y as
// (SGT (SignExt32to64 y) (SignExt32to64 x)), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32F lowers float32 x<y as
// FPFlagTrue(CMPGTF y x), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess32U lowers unsigned 32-bit x<y as
// (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess64 lowers signed 64-bit x<y as (SGT y x),
// i.e. y>x with operands swapped; no extension is needed at full width.
func rewriteValueLOONG64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess64F lowers float64 x<y as
// FPFlagTrue(CMPGTD y x), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess64U lowers unsigned 64-bit x<y as (SGTU y x),
// i.e. y>x with operands swapped; no extension is needed at full width.
func rewriteValueLOONG64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess8 lowers signed 8-bit x<y as
// (SGT (SignExt8to64 y) (SignExt8to64 x)), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess8U lowers unsigned 8-bit x<y as
// (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)), i.e. y>x with operands swapped.
func rewriteValueLOONG64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLoad lowers the generic Load op to the LOONG64 load
// instruction selected by the loaded type: MOVB/MOVBU for 8-bit signed/
// unsigned (booleans load as MOVBU), MOVH/MOVHU for 16-bit, MOVW/MOVWU for
// 32-bit, MOVV for 64-bit integers and pointers, and MOVF/MOVD for 32/64-bit
// floats. Rules are tried in order; the first matching type predicate wins.
func rewriteValueLOONG64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLocalAddr lowers LocalAddr to MOVVaddr. When the
// addressed element type contains pointers, the base is wrapped in
// SPanchored with the memory argument; otherwise the memory argument is
// dropped and MOVVaddr takes the base directly.
func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x16 lowers Lsh16x16 (16-bit value, 16-bit shift
// count) as MASKEQZ of the SLLV result with (SGTU 64 count), so the result
// is forced to zero whenever the zero-extended shift count is >= 64.
func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpLsh16x32 lowers Lsh16x32 (16-bit value, 32-bit shift
// count) as MASKEQZ of the SLLV result with (SGTU 64 count), so the result
// is forced to zero whenever the zero-extended shift count is >= 64.
func rewriteValueLOONG64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpLsh16x64 lowers Lsh16x64 (16-bit value, 64-bit shift
// count) as MASKEQZ of the SLLV result with (SGTU 64 y), so the result is
// forced to zero whenever the shift count is >= 64; no extension is needed
// for the already-64-bit count.
func rewriteValueLOONG64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 <t> x y)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 <t> x y)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	// y is already 64 bits, so no widening is needed; MASKEQZ zeroes the
	// SLLV result when the (64 > y) guard is 0 (shift amount >= 64).
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 <t> x y)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	// y is already 64 bits, so no widening is needed; MASKEQZ zeroes the
	// SLLV result when the (64 > y) guard is 0 (shift amount >= 64).
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 <t> x y)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	// y is already 64 bits, so no widening is needed; MASKEQZ zeroes the
	// SLLV result when the (64 > y) guard is 0 (shift amount >= 64).
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// A Go shift by 64 or more must yield 0: MASKEQZ zeroes the SLLV result
	// when the (64 > y) guard computed by SGTU is 0.
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
func rewriteValueLOONG64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (REMV (SignExt16to64 x) (SignExt16to64 y))
	// Both operands are sign-extended to 64 bits so the 64-bit signed
	// remainder equals the 16-bit signed remainder.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	// Both operands are zero-extended to 64 bits so the 64-bit unsigned
	// remainder equals the 16-bit unsigned remainder.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (REMV (SignExt32to64 x) (SignExt32to64 y))
	// Both operands are sign-extended to 64 bits so the 64-bit signed
	// remainder equals the 32-bit signed remainder.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (REMVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	// Both operands are zero-extended to 64 bits so the 64-bit unsigned
	// remainder equals the 32-bit unsigned remainder.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y)
	// result: (REMV x y)
	// 64-bit operands need no widening; lower directly to the machine op.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v.AddArg2(x, y)
		return true
	}
}
func rewriteValueLOONG64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMV (SignExt8to64 x) (SignExt8to64 y))
	// Both operands are sign-extended to 64 bits so the 64-bit signed
	// remainder equals the 8-bit signed remainder.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueLOONG64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	// Both operands are zero-extended to 64 bits so the 64-bit unsigned
	// remainder equals the 8-bit unsigned remainder.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMove lowers a generic Move (memmove of a known,
// constant size) to LOONG64 machine ops. Cases are tried in order: small
// fixed sizes become explicit load/store chains sized by the type's
// alignment; medium 8-byte-aligned moves become a DUFFCOPY; everything
// else falls back to the generic LoweredMove loop. Store chains are built
// back-to-front so each store's memory argument is the previous store.
func rewriteValueLOONG64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	// Unaligned 2-byte move: two byte copies instead of one halfword.
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	// Unaligned 4-byte move: four byte copies.
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	// The AuxInt is the byte offset to jump to inside the Duff's-device
	// copy routine (16 bytes of code per 8-byte word copied).
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64DUFFCOPY)
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)
	// Fallback runtime loop; the ADDVconst is the address of the last
	// element to copy, used as the loop bound.
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpLOONG64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
func rewriteValueLOONG64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	// x != y is lowered as (x ^ y) >u 0.
	// NOTE(review): x is widened with ZeroExt16to32 while y uses
	// ZeroExt16to64 — this asymmetry comes straight from the generated
	// rule; confirm against _gen/LOONG64.rules that it is intentional.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
func rewriteValueLOONG64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	// x != y is lowered as (x ^ y) >u 0 on the zero-extended operands.
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32F lowers float32 inequality: compare for
// equality with CMPEQF, then invert the FP flag (FPFlagFalse).
func rewriteValueLOONG64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64 lowers Neq64: XOR the operands (no extension
// needed at full width) and test the result nonzero via unsigned compare.
func rewriteValueLOONG64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64F lowers float64 inequality: compare for
// equality with CMPEQD, then invert the FP flag (FPFlagFalse).
func rewriteValueLOONG64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq8 lowers Neq8: both operands are zero-extended to
// 64 bits, XORed, and tested nonzero via unsigned compare against zero.
func rewriteValueLOONG64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeqPtr lowers pointer inequality identically to
// Neq64: XOR the pointers and test the result nonzero.
func rewriteValueLOONG64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNot lowers boolean negation to XOR with constant 1.
func rewriteValueLOONG64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueLOONG64_OpOffPtr lowers pointer-offset arithmetic. An offset
// from SP becomes a MOVVaddr (address materialization, 32-bit aux offset);
// any other base pointer becomes an ADDVconst with the full 64-bit offset.
func rewriteValueLOONG64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		// The 64-bit offset is narrowed to int32 for MOVVaddr's aux field.
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueLOONG64_OpPanicBounds lowers a bounds-check panic to one of
// three machine variants (A/B/C) selected by boundsABI(kind); the variants
// presumably differ in which registers hold x and y — confirm in the
// LOONG64 ops definition. Returns false if no variant matched.
func rewriteValueLOONG64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpLOONG64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpLOONG64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpLOONG64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRotateLeft16 lowers a 16-bit rotate by a constant
// into (x << (c&15)) | (x >> (-c&15)); only constant shift counts are
// rewritten here (non-constant counts fall through, returning false).
func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		// -c&15 is the complementary shift count for the right-shift half.
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRotateLeft32 lowers a 32-bit rotate-left to the
// hardware rotate-right instruction with a negated shift amount.
func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft32 x y)
	// result: (ROTR x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft64 lowers a 64-bit rotate-left to the
// hardware rotate-right instruction with a negated shift amount.
func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft64 x y)
	// result: (ROTRV x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTRV)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft8 lowers an 8-bit rotate by a constant
// into (x << (c&7)) | (x >> (-c&7)); only constant shift counts are
// rewritten here (non-constant counts fall through, returning false).
func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		// -c&7 is the complementary shift count for the right-shift half.
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux16 lowers an unsigned 16-bit right shift with
// a 16-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64 (SGTU 64 > count is the keep condition).
func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16Ux32 lowers an unsigned 16-bit right shift with
// a 32-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16Ux64 lowers an unsigned 16-bit right shift with
// a 64-bit count: SRLV on the zero-extended value (count used as-is), masked
// to zero via MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16Ux8 lowers an unsigned 16-bit right shift with
// an 8-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16x16 lowers a signed 16-bit right shift with a
// 16-bit count: SRAV of the sign-extended value, with the count saturated —
// NEGV(SGTU(count, 63)) is all-ones when count > 63, so the OR forces an
// effective shift that replicates the sign bit, matching Go shift semantics.
func rewriteValueLOONG64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended count) is shared between the SGTU guard and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16x32 lowers a signed 16-bit right shift with a
// 32-bit count: SRAV of the sign-extended value with the count saturated to
// the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended count) is shared between the SGTU guard and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16x64 lowers a signed 16-bit right shift with a
// 64-bit count: SRAV of the sign-extended value with the count (used as-is)
// saturated to the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16x8 lowers a signed 16-bit right shift with an
// 8-bit count: SRAV of the sign-extended value with the count saturated to
// the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended count) is shared between the SGTU guard and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32Ux16 lowers an unsigned 32-bit right shift with
// a 16-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32Ux32 lowers an unsigned 32-bit right shift with
// a 32-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32Ux64 lowers an unsigned 32-bit right shift with
// a 64-bit count: SRLV on the zero-extended value (count used as-is), masked
// to zero via MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32Ux8 lowers an unsigned 32-bit right shift with
// an 8-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32x16 lowers a signed 32-bit right shift with a
// 16-bit count: SRAV of the sign-extended value with the count saturated to
// the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended count) is shared between the SGTU guard and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32x32 lowers a signed 32-bit right shift with a
// 32-bit count: SRAV of the sign-extended value with the count saturated to
// the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended count) is shared between the SGTU guard and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32x64 lowers a signed 32-bit right shift with a
// 64-bit count: SRAV of the sign-extended value with the count (used as-is)
// saturated to the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh32x8 lowers a signed 32-bit right shift with an
// 8-bit count: SRAV of the sign-extended value with the count saturated to
// the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended count) is shared between the SGTU guard and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64Ux16 lowers an unsigned 64-bit right shift with
// a 16-bit count: SRLV on x directly (already full width), masked to zero
// via MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		// v1 (the extended count) is deliberately shared with the SRLV above.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64Ux32 lowers an unsigned 64-bit right shift with
// a 32-bit count: SRLV on x directly, masked to zero via MASKEQZ when the
// count is >= 64.
func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		// v1 (the extended count) is deliberately shared with the SRLV above.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64Ux64 lowers an unsigned 64-bit right shift with
// a 64-bit count: plain SRLV, masked to zero via MASKEQZ when the count is
// >= 64 (Go semantics: over-shifting yields zero for unsigned shifts).
func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// result: (MASKEQZ (SRLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64Ux8 lowers an unsigned 64-bit right shift with
// an 8-bit count: SRLV on x directly, masked to zero via MASKEQZ when the
// count is >= 64.
func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		// v1 (the extended count) is deliberately shared with the SRLV above.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64x16 lowers a signed 64-bit right shift with a
// 16-bit count: SRAV on x directly (already full width) with the count
// saturated to the 63+ range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the extended count) is shared between the SGTU guard and the OR.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64x32 lowers a signed 64-bit right shift with a
// 32-bit count: SRAV on x directly with the count saturated to the 63+
// range when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the extended count) is shared between the SGTU guard and the OR.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64x64 lowers a signed 64-bit right shift with a
// 64-bit count: SRAV on x with the count (used as-is) saturated to the 63+
// range when it exceeds 63 (via the NEGV/SGTU/OR idiom), so over-shifting
// replicates the sign bit as Go requires.
func rewriteValueLOONG64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRsh64x8 lowers a signed 64-bit right shift with an
// 8-bit count: SRAV on x directly with the count saturated to the 63+ range
// when it exceeds 63 (via the NEGV/SGTU/OR idiom).
func rewriteValueLOONG64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the extended count) is shared between the SGTU guard and the OR.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8Ux16 lowers an unsigned 8-bit right shift with a
// 16-bit count: SRLV on the zero-extended operands, masked to zero via
// MASKEQZ when the count is >= 64.
func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the extended count) is deliberately shared with the SRLV above.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8Ux32 lowers the generic unsigned 8-bit right
// shift by a 32-bit amount. Identical in structure to the Rsh8Ux16 rule,
// differing only in the zero-extension width of the shift amount; the
// MASKEQZ guard zeroes the result when the shift amount is >= 64.
func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the zero-extended y) is shared by the shift and the guard.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8Ux64 lowers the generic unsigned 8-bit right
// shift by a 64-bit amount. The shift amount y is already 64 bits wide,
// so no zero-extension of y is needed; the MASKEQZ guard zeroes the
// result when y >= 64.
func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8Ux8 lowers the generic unsigned 8-bit right
// shift by an 8-bit amount. Same structure as the other Rsh8Ux* rules:
// both operands are zero-extended to 64 bits and the MASKEQZ guard
// zeroes the result when the shift amount is >= 64.
func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the zero-extended y) is shared by the shift and the guard.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8x16 lowers the generic signed 8-bit right
// shift by a 16-bit amount. x is sign-extended so SRAV fills with the
// sign bit; the OR of (NEGV (SGTU ylong 63)) with the shift amount
// saturates out-of-range shift counts to all-ones (effectively 63).
func rewriteValueLOONG64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended y) is shared by both OR operands.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8x32 lowers the generic signed 8-bit right
// shift by a 32-bit amount. Identical in structure to the Rsh8x16 rule,
// differing only in the zero-extension width of the shift amount.
func rewriteValueLOONG64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended y) is shared by both OR operands.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8x64 lowers the generic signed 8-bit right
// shift by a 64-bit amount. The shift amount y is already 64 bits wide,
// so it is used directly; out-of-range counts are saturated to all-ones
// by the OR with (NEGV (SGTU y 63)).
func rewriteValueLOONG64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpRsh8x8 lowers the generic signed 8-bit right
// shift by an 8-bit amount. Same structure as the other Rsh8x* rules:
// x is sign-extended for SRAV, y is zero-extended, and out-of-range
// shift counts are saturated to all-ones by the OR/NEGV/SGTU idiom.
func rewriteValueLOONG64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended y) is shared by both OR operands.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpSelect0 lowers Select0 (the first result of a
// multi-result generic op) for the ops this backend expands:
// Mul64uhilo -> high half via MULHVU, Mul64uover -> the product via MULV,
// and the sum/difference parts of Add64carry/Sub64borrow.
// Returns false if the argument op is not one of these.
func rewriteValueLOONG64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Select0 (Mul64uhilo x y))
	// result: (MULHVU x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULHVU)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64SUBV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect1 lowers Select1 (the second result of a
// multi-result generic op): Mul64uhilo -> low half via MULV,
// Mul64uover -> overflow flag (high half != 0), and the carry/borrow
// outputs of Add64carry/Sub64borrow, computed via unsigned compares
// against the intermediate sum/difference s.
// Returns false if the argument op is not one of these.
func rewriteValueLOONG64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (MULHVU x y) (MOVVconst <typ.UInt64> [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpLOONG64MULHVU, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Select1 <t> (Add64carry x y c))
	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		// s is the bound intermediate sum x+y from the rule; it is shared
		// by both SGTU comparisons.
		s := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 <t> (Sub64borrow x y c))
	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		// s is the bound intermediate difference x-y from the rule; it is
		// shared by both SGTU comparisons.
		s := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSlicemask lowers Slicemask (all-ones when x > 0,
// zero when x == 0) by negating x and arithmetically shifting the sign
// bit down 63 positions.
func rewriteValueLOONG64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpStore lowers a generic Store to the LOONG64
// store op matching the element size and kind recorded in v.Aux:
// 1 byte -> MOVBstore, 2 -> MOVHstore, 4 -> MOVWstore (int) / MOVFstore
// (float), 8 -> MOVVstore (int) / MOVDstore (float).
// Returns false for sizes with no matching rule.
func rewriteValueLOONG64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpZero lowers a generic Zero (clear s bytes at
// ptr) by size and alignment. Small fixed sizes become chains of
// zero-value stores of the widest aligned width; 8-aligned sizes from
// 32 to 1024 bytes use the Duff's-device zeroer; everything else falls
// back to a LoweredZero loop. Rules are tried in order; returns false
// if none applies.
func rewriteValueLOONG64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		// A single zero constant (v0) is shared by every store in the chain.
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpLOONG64DUFFZERO)
		// The AuxInt is the entry offset into the Duff's-device routine:
		// skip 128 - s/8 of its 128 8-byte store steps.
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0
	// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !((s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpLOONG64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		// Second argument is the address of the last element to clear.
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(ptr)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
  7630  func rewriteBlockLOONG64(b *Block) bool {
  7631  	typ := &b.Func.Config.Types
  7632  	switch b.Kind {
  7633  	case BlockLOONG64EQ:
  7634  		// match: (EQ (FPFlagTrue cmp) yes no)
  7635  		// result: (FPF cmp yes no)
  7636  		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
  7637  			v_0 := b.Controls[0]
  7638  			cmp := v_0.Args[0]
  7639  			b.resetWithControl(BlockLOONG64FPF, cmp)
  7640  			return true
  7641  		}
  7642  		// match: (EQ (FPFlagFalse cmp) yes no)
  7643  		// result: (FPT cmp yes no)
  7644  		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
  7645  			v_0 := b.Controls[0]
  7646  			cmp := v_0.Args[0]
  7647  			b.resetWithControl(BlockLOONG64FPT, cmp)
  7648  			return true
  7649  		}
  7650  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  7651  		// result: (NE cmp yes no)
  7652  		for b.Controls[0].Op == OpLOONG64XORconst {
  7653  			v_0 := b.Controls[0]
  7654  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7655  				break
  7656  			}
  7657  			cmp := v_0.Args[0]
  7658  			if cmp.Op != OpLOONG64SGT {
  7659  				break
  7660  			}
  7661  			b.resetWithControl(BlockLOONG64NE, cmp)
  7662  			return true
  7663  		}
  7664  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  7665  		// result: (NE cmp yes no)
  7666  		for b.Controls[0].Op == OpLOONG64XORconst {
  7667  			v_0 := b.Controls[0]
  7668  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7669  				break
  7670  			}
  7671  			cmp := v_0.Args[0]
  7672  			if cmp.Op != OpLOONG64SGTU {
  7673  				break
  7674  			}
  7675  			b.resetWithControl(BlockLOONG64NE, cmp)
  7676  			return true
  7677  		}
  7678  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  7679  		// result: (NE cmp yes no)
  7680  		for b.Controls[0].Op == OpLOONG64XORconst {
  7681  			v_0 := b.Controls[0]
  7682  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7683  				break
  7684  			}
  7685  			cmp := v_0.Args[0]
  7686  			if cmp.Op != OpLOONG64SGTconst {
  7687  				break
  7688  			}
  7689  			b.resetWithControl(BlockLOONG64NE, cmp)
  7690  			return true
  7691  		}
  7692  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  7693  		// result: (NE cmp yes no)
  7694  		for b.Controls[0].Op == OpLOONG64XORconst {
  7695  			v_0 := b.Controls[0]
  7696  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7697  				break
  7698  			}
  7699  			cmp := v_0.Args[0]
  7700  			if cmp.Op != OpLOONG64SGTUconst {
  7701  				break
  7702  			}
  7703  			b.resetWithControl(BlockLOONG64NE, cmp)
  7704  			return true
  7705  		}
  7706  		// match: (EQ (SGTUconst [1] x) yes no)
  7707  		// result: (NE x yes no)
  7708  		for b.Controls[0].Op == OpLOONG64SGTUconst {
  7709  			v_0 := b.Controls[0]
  7710  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7711  				break
  7712  			}
  7713  			x := v_0.Args[0]
  7714  			b.resetWithControl(BlockLOONG64NE, x)
  7715  			return true
  7716  		}
  7717  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
  7718  		// result: (EQ x yes no)
  7719  		for b.Controls[0].Op == OpLOONG64SGTU {
  7720  			v_0 := b.Controls[0]
  7721  			_ = v_0.Args[1]
  7722  			x := v_0.Args[0]
  7723  			v_0_1 := v_0.Args[1]
  7724  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7725  				break
  7726  			}
  7727  			b.resetWithControl(BlockLOONG64EQ, x)
  7728  			return true
  7729  		}
  7730  		// match: (EQ (SGTconst [0] x) yes no)
  7731  		// result: (GEZ x yes no)
  7732  		for b.Controls[0].Op == OpLOONG64SGTconst {
  7733  			v_0 := b.Controls[0]
  7734  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7735  				break
  7736  			}
  7737  			x := v_0.Args[0]
  7738  			b.resetWithControl(BlockLOONG64GEZ, x)
  7739  			return true
  7740  		}
  7741  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
  7742  		// result: (LEZ x yes no)
  7743  		for b.Controls[0].Op == OpLOONG64SGT {
  7744  			v_0 := b.Controls[0]
  7745  			_ = v_0.Args[1]
  7746  			x := v_0.Args[0]
  7747  			v_0_1 := v_0.Args[1]
  7748  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7749  				break
  7750  			}
  7751  			b.resetWithControl(BlockLOONG64LEZ, x)
  7752  			return true
  7753  		}
  7754  		// match: (EQ (MOVVconst [0]) yes no)
  7755  		// result: (First yes no)
  7756  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7757  			v_0 := b.Controls[0]
  7758  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7759  				break
  7760  			}
  7761  			b.Reset(BlockFirst)
  7762  			return true
  7763  		}
  7764  		// match: (EQ (MOVVconst [c]) yes no)
  7765  		// cond: c != 0
  7766  		// result: (First no yes)
  7767  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7768  			v_0 := b.Controls[0]
  7769  			c := auxIntToInt64(v_0.AuxInt)
  7770  			if !(c != 0) {
  7771  				break
  7772  			}
  7773  			b.Reset(BlockFirst)
  7774  			b.swapSuccessors()
  7775  			return true
  7776  		}
  7777  	case BlockLOONG64GEZ:
  7778  		// match: (GEZ (MOVVconst [c]) yes no)
  7779  		// cond: c >= 0
  7780  		// result: (First yes no)
  7781  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7782  			v_0 := b.Controls[0]
  7783  			c := auxIntToInt64(v_0.AuxInt)
  7784  			if !(c >= 0) {
  7785  				break
  7786  			}
  7787  			b.Reset(BlockFirst)
  7788  			return true
  7789  		}
  7790  		// match: (GEZ (MOVVconst [c]) yes no)
  7791  		// cond: c < 0
  7792  		// result: (First no yes)
  7793  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7794  			v_0 := b.Controls[0]
  7795  			c := auxIntToInt64(v_0.AuxInt)
  7796  			if !(c < 0) {
  7797  				break
  7798  			}
  7799  			b.Reset(BlockFirst)
  7800  			b.swapSuccessors()
  7801  			return true
  7802  		}
  7803  	case BlockLOONG64GTZ:
  7804  		// match: (GTZ (MOVVconst [c]) yes no)
  7805  		// cond: c > 0
  7806  		// result: (First yes no)
  7807  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7808  			v_0 := b.Controls[0]
  7809  			c := auxIntToInt64(v_0.AuxInt)
  7810  			if !(c > 0) {
  7811  				break
  7812  			}
  7813  			b.Reset(BlockFirst)
  7814  			return true
  7815  		}
  7816  		// match: (GTZ (MOVVconst [c]) yes no)
  7817  		// cond: c <= 0
  7818  		// result: (First no yes)
  7819  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7820  			v_0 := b.Controls[0]
  7821  			c := auxIntToInt64(v_0.AuxInt)
  7822  			if !(c <= 0) {
  7823  				break
  7824  			}
  7825  			b.Reset(BlockFirst)
  7826  			b.swapSuccessors()
  7827  			return true
  7828  		}
  7829  	case BlockIf:
  7830  		// match: (If cond yes no)
  7831  		// result: (NE (MOVBUreg <typ.UInt64> cond) yes no)
  7832  		for {
  7833  			cond := b.Controls[0]
  7834  			v0 := b.NewValue0(cond.Pos, OpLOONG64MOVBUreg, typ.UInt64)
  7835  			v0.AddArg(cond)
  7836  			b.resetWithControl(BlockLOONG64NE, v0)
  7837  			return true
  7838  		}
  7839  	case BlockLOONG64LEZ:
  7840  		// match: (LEZ (MOVVconst [c]) yes no)
  7841  		// cond: c <= 0
  7842  		// result: (First yes no)
  7843  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7844  			v_0 := b.Controls[0]
  7845  			c := auxIntToInt64(v_0.AuxInt)
  7846  			if !(c <= 0) {
  7847  				break
  7848  			}
  7849  			b.Reset(BlockFirst)
  7850  			return true
  7851  		}
  7852  		// match: (LEZ (MOVVconst [c]) yes no)
  7853  		// cond: c > 0
  7854  		// result: (First no yes)
  7855  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7856  			v_0 := b.Controls[0]
  7857  			c := auxIntToInt64(v_0.AuxInt)
  7858  			if !(c > 0) {
  7859  				break
  7860  			}
  7861  			b.Reset(BlockFirst)
  7862  			b.swapSuccessors()
  7863  			return true
  7864  		}
  7865  	case BlockLOONG64LTZ:
  7866  		// match: (LTZ (MOVVconst [c]) yes no)
  7867  		// cond: c < 0
  7868  		// result: (First yes no)
  7869  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7870  			v_0 := b.Controls[0]
  7871  			c := auxIntToInt64(v_0.AuxInt)
  7872  			if !(c < 0) {
  7873  				break
  7874  			}
  7875  			b.Reset(BlockFirst)
  7876  			return true
  7877  		}
  7878  		// match: (LTZ (MOVVconst [c]) yes no)
  7879  		// cond: c >= 0
  7880  		// result: (First no yes)
  7881  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  7882  			v_0 := b.Controls[0]
  7883  			c := auxIntToInt64(v_0.AuxInt)
  7884  			if !(c >= 0) {
  7885  				break
  7886  			}
  7887  			b.Reset(BlockFirst)
  7888  			b.swapSuccessors()
  7889  			return true
  7890  		}
  7891  	case BlockLOONG64NE:
  7892  		// match: (NE (FPFlagTrue cmp) yes no)
  7893  		// result: (FPT cmp yes no)
  7894  		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
  7895  			v_0 := b.Controls[0]
  7896  			cmp := v_0.Args[0]
  7897  			b.resetWithControl(BlockLOONG64FPT, cmp)
  7898  			return true
  7899  		}
  7900  		// match: (NE (FPFlagFalse cmp) yes no)
  7901  		// result: (FPF cmp yes no)
  7902  		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
  7903  			v_0 := b.Controls[0]
  7904  			cmp := v_0.Args[0]
  7905  			b.resetWithControl(BlockLOONG64FPF, cmp)
  7906  			return true
  7907  		}
  7908  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  7909  		// result: (EQ cmp yes no)
  7910  		for b.Controls[0].Op == OpLOONG64XORconst {
  7911  			v_0 := b.Controls[0]
  7912  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7913  				break
  7914  			}
  7915  			cmp := v_0.Args[0]
  7916  			if cmp.Op != OpLOONG64SGT {
  7917  				break
  7918  			}
  7919  			b.resetWithControl(BlockLOONG64EQ, cmp)
  7920  			return true
  7921  		}
  7922  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  7923  		// result: (EQ cmp yes no)
  7924  		for b.Controls[0].Op == OpLOONG64XORconst {
  7925  			v_0 := b.Controls[0]
  7926  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7927  				break
  7928  			}
  7929  			cmp := v_0.Args[0]
  7930  			if cmp.Op != OpLOONG64SGTU {
  7931  				break
  7932  			}
  7933  			b.resetWithControl(BlockLOONG64EQ, cmp)
  7934  			return true
  7935  		}
  7936  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  7937  		// result: (EQ cmp yes no)
  7938  		for b.Controls[0].Op == OpLOONG64XORconst {
  7939  			v_0 := b.Controls[0]
  7940  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7941  				break
  7942  			}
  7943  			cmp := v_0.Args[0]
  7944  			if cmp.Op != OpLOONG64SGTconst {
  7945  				break
  7946  			}
  7947  			b.resetWithControl(BlockLOONG64EQ, cmp)
  7948  			return true
  7949  		}
  7950  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  7951  		// result: (EQ cmp yes no)
  7952  		for b.Controls[0].Op == OpLOONG64XORconst {
  7953  			v_0 := b.Controls[0]
  7954  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7955  				break
  7956  			}
  7957  			cmp := v_0.Args[0]
  7958  			if cmp.Op != OpLOONG64SGTUconst {
  7959  				break
  7960  			}
  7961  			b.resetWithControl(BlockLOONG64EQ, cmp)
  7962  			return true
  7963  		}
  7964  		// match: (NE (SGTUconst [1] x) yes no)
  7965  		// result: (EQ x yes no)
  7966  		for b.Controls[0].Op == OpLOONG64SGTUconst {
  7967  			v_0 := b.Controls[0]
  7968  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7969  				break
  7970  			}
  7971  			x := v_0.Args[0]
  7972  			b.resetWithControl(BlockLOONG64EQ, x)
  7973  			return true
  7974  		}
  7975  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
  7976  		// result: (NE x yes no)
  7977  		for b.Controls[0].Op == OpLOONG64SGTU {
  7978  			v_0 := b.Controls[0]
  7979  			_ = v_0.Args[1]
  7980  			x := v_0.Args[0]
  7981  			v_0_1 := v_0.Args[1]
  7982  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7983  				break
  7984  			}
  7985  			b.resetWithControl(BlockLOONG64NE, x)
  7986  			return true
  7987  		}
  7988  		// match: (NE (SGTconst [0] x) yes no)
  7989  		// result: (LTZ x yes no)
  7990  		for b.Controls[0].Op == OpLOONG64SGTconst {
  7991  			v_0 := b.Controls[0]
  7992  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7993  				break
  7994  			}
  7995  			x := v_0.Args[0]
  7996  			b.resetWithControl(BlockLOONG64LTZ, x)
  7997  			return true
  7998  		}
  7999  		// match: (NE (SGT x (MOVVconst [0])) yes no)
  8000  		// result: (GTZ x yes no)
  8001  		for b.Controls[0].Op == OpLOONG64SGT {
  8002  			v_0 := b.Controls[0]
  8003  			_ = v_0.Args[1]
  8004  			x := v_0.Args[0]
  8005  			v_0_1 := v_0.Args[1]
  8006  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  8007  				break
  8008  			}
  8009  			b.resetWithControl(BlockLOONG64GTZ, x)
  8010  			return true
  8011  		}
  8012  		// match: (NE (MOVVconst [0]) yes no)
  8013  		// result: (First no yes)
  8014  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  8015  			v_0 := b.Controls[0]
  8016  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8017  				break
  8018  			}
  8019  			b.Reset(BlockFirst)
  8020  			b.swapSuccessors()
  8021  			return true
  8022  		}
  8023  		// match: (NE (MOVVconst [c]) yes no)
  8024  		// cond: c != 0
  8025  		// result: (First yes no)
  8026  		for b.Controls[0].Op == OpLOONG64MOVVconst {
  8027  			v_0 := b.Controls[0]
  8028  			c := auxIntToInt64(v_0.AuxInt)
  8029  			if !(c != 0) {
  8030  				break
  8031  			}
  8032  			b.Reset(BlockFirst)
  8033  			return true
  8034  		}
  8035  	}
  8036  	return false
  8037  }
  8038  

View as plain text