Source file src/cmd/compile/internal/ssa/rewriteARM64latelower.go

// Code generated from _gen/ARM64latelower.rules using 'go generate'; DO NOT EDIT.

package ssa

func rewriteValueARM64latelower(v *Value) bool {
	switch v.Op {
	case OpARM64ADDSconstflags:
		return rewriteValueARM64latelower_OpARM64ADDSconstflags(v)
	case OpARM64ADDconst:
		return rewriteValueARM64latelower_OpARM64ADDconst(v)
	case OpARM64ANDconst:
		return rewriteValueARM64latelower_OpARM64ANDconst(v)
	case OpARM64CMNWconst:
		return rewriteValueARM64latelower_OpARM64CMNWconst(v)
	case OpARM64CMNconst:
		return rewriteValueARM64latelower_OpARM64CMNconst(v)
	case OpARM64CMPWconst:
		return rewriteValueARM64latelower_OpARM64CMPWconst(v)
	case OpARM64CMPconst:
		return rewriteValueARM64latelower_OpARM64CMPconst(v)
	case OpARM64MOVBUreg:
		return rewriteValueARM64latelower_OpARM64MOVBUreg(v)
	case OpARM64MOVBreg:
		return rewriteValueARM64latelower_OpARM64MOVBreg(v)
	case OpARM64MOVDconst:
		return rewriteValueARM64latelower_OpARM64MOVDconst(v)
	case OpARM64MOVDnop:
		return rewriteValueARM64latelower_OpARM64MOVDnop(v)
	case OpARM64MOVDreg:
		return rewriteValueARM64latelower_OpARM64MOVDreg(v)
	case OpARM64MOVHUreg:
		return rewriteValueARM64latelower_OpARM64MOVHUreg(v)
	case OpARM64MOVHreg:
		return rewriteValueARM64latelower_OpARM64MOVHreg(v)
	case OpARM64MOVWUreg:
		return rewriteValueARM64latelower_OpARM64MOVWUreg(v)
	case OpARM64MOVWreg:
		return rewriteValueARM64latelower_OpARM64MOVWreg(v)
	case OpARM64ORconst:
		return rewriteValueARM64latelower_OpARM64ORconst(v)
	case OpARM64SLLconst:
		return rewriteValueARM64latelower_OpARM64SLLconst(v)
	case OpARM64SUBconst:
		return rewriteValueARM64latelower_OpARM64SUBconst(v)
	case OpARM64TSTWconst:
		return rewriteValueARM64latelower_OpARM64TSTWconst(v)
	case OpARM64TSTconst:
		return rewriteValueARM64latelower_OpARM64TSTconst(v)
	case OpARM64XORconst:
		return rewriteValueARM64latelower_OpARM64XORconst(v)
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDSconstflags(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDSconstflags [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADDSflags x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADDSflags)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADD x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ANDconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (AND x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64AND)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMNW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMNW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMN x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMN)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMPW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMPW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMP x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMP)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(Equal _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64Equal {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(NotEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64NotEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDconst(v *Value) bool {
	// match: (MOVDconst [0])
	// result: (ZERO)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64ZERO)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x)
	// cond: zeroUpper32Bits(x, 3)
	// result: x
	for {
		x := v_0
		if !(zeroUpper32Bits(x, 3)) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (OR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64OR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64SLLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLconst [1] x)
	// result: (ADD x x)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		x := v_0
		v.reset(OpARM64ADD)
		v.AddArg2(x, x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64SUBconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SUBconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (SUB x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTWconst [c] x)
	// cond: !isARM64bitcon(uint64(c)|uint64(c)<<32)
	// result: (TSTW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c) | uint64(c)<<32)) {
			break
		}
		v.reset(OpARM64TSTW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (TST x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64TST)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (XOR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteBlockARM64latelower(b *Block) bool {
	return false
}

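For reference, separate from the generated code above: each rewrite function is emitted from a rule in _gen/ARM64latelower.rules, and the rule's pattern, condition, and replacement are echoed in the // match:, // cond:, and // result: comments. Several of the arithmetic rules fire only when !isARM64addcon(c), i.e. when the constant cannot be encoded as an ARM64 ADD/SUB immediate (an unsigned 12-bit value, optionally shifted left by 12 bits). The sketch below is a minimal, hypothetical stand-in for that kind of predicate, assuming only that encoding rule; it is not the compiler's own isARM64addcon.

package main

import "fmt"

// addConFits is a hypothetical stand-in for an isARM64addcon-style check.
// It reports whether c fits an ARM64 ADD/SUB immediate: an unsigned 12-bit
// value, optionally shifted left by 12 bits. Constants that fail this test
// are the ones rules such as (ADDconst [c] x) rewrite into an explicit
// MOVDconst feeding a register-register ADD.
func addConFits(c int64) bool {
	if c < 0 {
		return false
	}
	if c&0xfff == 0 { // low 12 bits clear: try the LSL #12 form
		c >>= 12
	}
	return c <= 0xfff
}

func main() {
	fmt.Println(addConFits(0x123))    // true: plain 12-bit immediate
	fmt.Println(addConFits(0x123000)) // true: 12-bit immediate shifted left by 12
	fmt.Println(addConFits(0x123456)) // false: needs MOVDconst plus a register ADD
}

The bitwise rules (ANDconst, ORconst, XORconst, TSTconst) gate on isARM64bitcon instead, because ARM64 logical instructions use the separate bitmask-immediate encoding rather than the 12-bit add/sub form.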