Source file src/cmd/compile/internal/walk/assign.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"go/constant"
	"internal/abi"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)

// walkAssign walks an OAS (AssignStmt) or OASOP (AssignOpStmt) node.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.X is x, as.Y.X is c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, z)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string).
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.Fun = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}
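
// A rough sketch of the rewrites above (identifiers are illustrative):
// an OASOP statement such as
//
//	x += y
//
// becomes the plain assignment x = x + y, and for
//
//	m[k] = append(m[k], v)
//
// the walked-and-saved m[k] is patched back into the append call so
// the read and the write share a single mapassign call.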

// walkAssignDotType walks an OAS2DOTTYPE node.
func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
	walkExprListSafe(n.Lhs, init)
	n.Rhs[0] = walkExpr(n.Rhs[0], init)
	return n
}

// walkAssignFunc walks an OAS2FUNC node.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
		n.Rhs = []ir.Node{r}
		return n
	}
	init.Append(r)

	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}
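
// Roughly, for a multi-result call statement (names illustrative)
//
//	a, b = f()
//
// order has already rewritten the Lhs into autotemps, so the call is
// appended to init and ascompatet copies each result slot into its
// temporary, one assignment per non-blank destination.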

// walkAssignList walks an OAS2 node.
func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)
	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
}

// walkAssignMapRead walks an OAS2MAPR node.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type()

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index, false)

	// from:
	//   a,b = m[i]
	// to:
	//   var,b = mapaccess2*(t, m, i)
	//   a = *var
	a := n.Lhs[0]

	var call *ir.CallExpr
	if w := t.Elem().Size(); w <= abi.ZeroValSize {
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key)
	} else {
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key, z)
	}

	// mapaccess2* returns a typed bool, but due to spec changes,
	// the boolean result of i.(T) is now untyped so we make it the
	// same type as the variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// don't generate a = *var if a is _
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}
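
// Worked example (approximate; names illustrative): for a map with a
// large element type,
//
//	a, ok = m[k]
//
// becomes roughly
//
//	ptr, ok = mapaccess2_fat(maptype, m, k, &zero)
//	a = *ptr
//
// while elements of at most abi.ZeroValSize bytes use the plain
// mapaccess2 variant selected by mapfast.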

// walkAssignRecv walks an OAS2RECV node.
func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.UnaryExpr) // recv
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	var n1 ir.Node
	if ir.IsBlank(n.Lhs[0]) {
		n1 = typecheck.NodNil()
	} else {
		n1 = typecheck.NodAddr(n.Lhs[0])
	}
	fn := chanfn("chanrecv2", 2, r.X.Type())
	ok := n.Lhs[1]
	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
	return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
}
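
// Sketch: a comma-ok receive (names illustrative)
//
//	v, ok = <-c
//
// lowers to a call of the form
//
//	ok = chanrecv2(c, &v)
//
// with nil passed in place of &v when v is blank.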

// walkReturn walks an ORETURN node.
func walkReturn(n *ir.ReturnStmt) ir.Node {
	fn := ir.CurFunc

	fn.NumReturns++
	if len(n.Results) == 0 {
		return n
	}

	results := fn.Type().Results()
	dsts := make([]ir.Node, len(results))
	for i, v := range results {
		// TODO(mdempsky): typecheck should have already checked the result variables.
		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
	}

	n.Results = ascompatee(n.Op(), dsts, n.Results)
	return n
}
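
// Sketch: in a function with result parameters r0 and r1 (names
// illustrative), a statement
//
//	return a, b
//
// is rewritten into explicit result assignments
//
//	r0 = a
//	r1 = b
//
// with ascompatee inserting early copies if those assignments could
// interfere with each other.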

// ascompatet checks assigning a function call's result list
// to an expression list, as in
//
//	expr-list = func()
func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
	if len(nl) != nr.NumFields() {
		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
	}

	var nn ir.Nodes
	for i, l := range nl {
		if ir.IsBlank(l) {
			continue
		}
		r := nr.Field(i)

		// Order should have created autotemps of the appropriate type for
		// us to store results into.
		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
		}

		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
		res.Index = int64(i)
		res.SetType(r.Type)
		res.SetTypecheck(1)

		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
	}
	return nn
}

// ascompatee checks assigning an expression list to
// an expression list, as in
//
//	expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// If an expression has init statements, they must be evaluated
			// before any of its saved sub-operands (#45706).
			// TODO(mdempsky): Disallow init statements on lvalues.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// become visible early if the evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if ir.IsBlank(name) {
			// We can ignore assignments to blank or anonymous result parameters.
			// These can't appear in expressions anyway.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

	early.Append(late.Take()...)
	return early
}
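
// Example of the interference ascompatee guards against: in
//
//	x, y = y, x
//
// the first assignment x = y would clobber the x still needed on the
// right-hand side, so the original x is copied to a temporary first,
// yielding in effect
//
//	tmp := x
//	x = y
//	y = tmp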

// readsMemory reports whether evaluating n directly reads from
// memory that might be written to indirectly.
func readsMemory(n ir.Node) bool {
	switch n.Op() {
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Class == ir.PFUNC {
			return false
		}
		return n.Addrtaken() || !n.OnStack()

	case ir.OADD,
		ir.OAND,
		ir.OANDAND,
		ir.OANDNOT,
		ir.OBITNOT,
		ir.OCONV,
		ir.OCONVIFACE,
		ir.OCONVNOP,
		ir.ODIV,
		ir.ODOT,
		ir.ODOTTYPE,
		ir.OLITERAL,
		ir.OLSH,
		ir.OMOD,
		ir.OMUL,
		ir.ONEG,
		ir.ONIL,
		ir.OOR,
		ir.OOROR,
		ir.OPAREN,
		ir.OPLUS,
		ir.ORSH,
		ir.OSUB,
		ir.OXOR:
		return false
	}

	// Be conservative.
	return true
}
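
// For example, x+y, a literal, or a non-addrtaken local does not read
// such memory, while an indexing or pointer-dereference expression
// falls through to the conservative default above.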

// appendSlice expands append(l1, l2...) to
//
//	init {
//	  s := l1
//	  newLen := s.len + l2.len
//	  // Compare as uint so growslice can panic on overflow.
//	  if uint(newLen) <= uint(s.cap) {
//	    s = s[:newLen]
//	  } else {
//	    s = growslice(s.ptr, s.len, s.cap, l2.len, T)
//	  }
//	  memmove(&s[s.len-l2.len], &l2[0], l2.len*sizeof(T))
//	}
//	s
//
// l2 is allowed to be a string.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// Decompose slice.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements we are adding
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// else { s = growslice(oldPtr, newLen, oldCap, num, T) }
	call := walkGrowslice(s, nif.PtrInit(), oldPtr, newLen, oldCap, num)
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index to start copying into s.
	//   idx = newLen - len(l2)
	// We use this expression instead of oldLen because it avoids
	// a spill/restore of oldLen.
	// Note: this doesn't work optimally currently because
	// the compiler optimizer undoes this arithmetic.
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[idx:], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy", l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//  copy(s[idx:], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy", ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
	} else {
		// memmove(&s[idx], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove", elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}
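
// Usage sketch: appendSlice covers the spread forms
//
//	s = append(s, t...)        // t a slice
//	b = append(b, "hi"...)     // string appended to a byte slice
//
// and picks typedslicecopy, slicecopy, or memmove for the tail copy
// depending on pointers in the element type and on instrumentation.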

// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n ir.Node) bool {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}

	if n.Op() != ir.OAPPEND {
		return false
	}
	call := n.(*ir.CallExpr)
	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
		return false
	}

	mk := call.Args[1].(*ir.MakeExpr)
	if mk.Cap != nil {
		return false
	}

	// y must be either an integer constant, or the largest possible
	// positive value of variable y must fit into a uint.

	// typecheck made sure that constant arguments to make are not negative and fit into an int.

	// Overflow of the len argument to make is handled by an explicit check of int(len) < 0 at run time.
	y := mk.Len
	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
		return false
	}

	return true
}
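
// Examples: isAppendOfMake matches
//
//	append(x, make([]T, n)...)
//
// but not append(x, make([]T, n, c)...) (explicit cap), a call without
// ..., or any build compiled with -N or with instrumentation enabled.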

// extendSlice rewrites append(l1, make([]T, l2)...) to
//
//	init {
//	  if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//	  } else {
//	    panicmakeslicelen()
//	  }
//	  s := l1
//	  if l2 != 0 {
//	    n := len(s) + l2
//	    // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//	    // cap is a positive int and n can become negative when len(s) + l2
//	    // overflows int. Interpreting n when negative as uint makes it larger
//	    // than cap(s). growslice will check the int n arg and panic if n is
//	    // negative. This prevents the overflow from being undetected.
//	    if uint(n) <= uint(cap(s)) {
//	      s = s[:n]
//	    } else {
//	      s = growslice(s.ptr, n, s.cap, l2, T)
//	    }
//	    // clear the new portion of the underlying array.
//	    hp := &s[len(s)-l2]
//	    hn := l2 * sizeof(T)
//	    memclr(hp, hn)
//	  }
//	}
//	s
//
// If T has pointers, the final memclr can go inside the "then" branch, as
// growslice will have done the clearing for us.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into a uint.
	// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
	// check of l2 < 0 at runtime which is generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.List.Second().

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	// if l2 != 0 {
	// Avoid work if we're not appending anything. But more importantly,
	// avoid allowing hp to be a past-the-end pointer when clearing. See issue 67255.
	nifnz := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifnz.Likely = true
	nodes = append(nodes, nifnz)

	elemtype := s.Type().Elem()

	// n := s.len + l2
	nn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nifnz.Body = append(nifnz.Body, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) <= uint(s.cap)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then { s = s[:n] }
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// else { s = growslice(s.ptr, n, s.cap, l2, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2)),
	}

	nifnz.Body = append(nifnz.Body, nif)

	// hp := &s[s.len - l2]
	// TODO: &s[s.len] - hn?
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elem(s))
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// growslice will have cleared the new entries, so only
		// if growslice isn't called do we need to do the zeroing ourselves.
		nif.Body = append(nif.Body, clr...)
	} else {
		nifnz.Body = append(nifnz.Body, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}
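
// Net effect, sketched: for
//
//	x = append(x, make([]T, n)...)
//
// extendSlice extends x by n zeroed elements in place when capacity
// permits, never materializing the temporary make([]T, n) slice.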