package walk

import (
	"fmt"
	"go/constant"
	"go/token"
	"internal/abi"
	"internal/buildcfg"
	"strings"

	"cmd/compile/internal/base"
	"cmd/compile/internal/escape"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
)

// walkAppend rewrites append(src, x, y, z) so that any side effects in
// x, y, z are evaluated in initial order, before the append, and so that
// the arguments are converted to the slice's element type. dst is the
// destination the result will be assigned to; if it is the same safe
// expression as src, src does not need to be copied.
//
// For ordinary builds the OAPPEND node itself is left for SSA generation.
// When the build is instrumented (and we are not compiling the runtime),
// the append is instead expanded here into roughly:
//
//	s := src
//	newLen := len(s) + argc
//	if uint(newLen) <= uint(cap(s)) {
//		s = s[:newLen]
//	} else {
//		s = growslice(s.ptr, newLen, cap(s), argc, T)
//	}
//	s[newLen-argc] = x
//	s[newLen-argc+1] = y
//	...
//	s
func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
	if !ir.SameSafeExpr(dst, n.Args[0]) {
		n.Args[0] = safeExpr(n.Args[0], init)
		n.Args[0] = walkExpr(n.Args[0], init)
	}
	walkExprListSafe(n.Args[1:], init)

	nsrc := n.Args[0]

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are names or literals, but those may index the slice
	// we are modifying here. Using cheapExpr also makes sure that
	// the evaluation of all arguments (and especially any panics)
	// happens before we begin to modify the slice in a visible way.
	ls := n.Args[1:]
	for i, n := range ls {
		n = cheapExpr(n, init)
		if !types.Identical(n.Type(), nsrc.Type().Elem()) {
			n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
			n = walkExpr(n, init)
		}
		ls[i] = n
	}

	argc := len(n.Args) - 1
	if argc < 1 {
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for ssagen, except that instrumentation requires the old form.
	if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
		return n
	}

	var l []ir.Node

	// s = src
	s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type())
	l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))

	// num = number of appended values
	num := ir.NewInt(base.Pos, int64(argc))

	// newLen := len(s) + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num)))

	// if uint(newLen) <= uint(cap(s))
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT]))
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, slice),
	}

	// else { s = growslice(s.ptr, newLen, cap(s), num, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			newLen,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			num)),
	}

	l = append(l, nif)

	ls = n.Args[1:]
	for i, n := range ls {
		// s[newLen-argc+i] = arg
		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i))))
		ix.SetBounded(true)
		l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
	}

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return s
}

// walkGrowslice builds a call to runtime.growslice that grows the given
// slice to newLen elements.
func walkGrowslice(slice *ir.Name, init *ir.Nodes, oldPtr, newLen, oldCap, num ir.Node) *ir.CallExpr {
	elemtype := slice.Type().Elem()
	fn := typecheck.LookupRuntime("growslice", elemtype, elemtype)
	elemtypeptr := reflectdata.TypePtrAt(base.Pos, elemtype)
	return mkcall1(fn, slice.Type(), init, oldPtr, newLen, oldCap, num, elemtypeptr)
}
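
// For reference, the runtime helper targeted above currently has roughly the
// shape (an internal signature, subject to change between releases):
//
//	func growslice(oldPtr unsafe.Pointer, newLen, oldCap, num int, et *_type) []T
//
// so the argument order built in walkGrowslice mirrors that signature, with
// the element type descriptor passed last.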

// walkClear walks an OCLEAR node (the clear builtin).
func walkClear(n *ir.UnaryExpr) ir.Node {
	typ := n.X.Type()
	switch {
	case typ.IsSlice():
		if n := arrayClear(n.X.Pos(), n.X, nil); n != nil {
			return n
		}
		// arrayClear returned nil: the backing array takes zero memory,
		// so there is nothing to do.
		return ir.NewBlockStmt(n.Pos(), nil)
	case typ.IsMap():
		return mapClear(n.X, reflectdata.TypePtrAt(n.X.Pos(), n.X.Type()))
	}
	panic("unreachable")
}
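
// As a rough sketch, the lowering above corresponds to:
//
//	clear(s) // s a slice  ->  zero the memory of s[:len(s)] (arrayClear)
//	clear(m) // m a map    ->  a runtime map-clear call (mapClear)
//
// (Helper names here are illustrative; the exact runtime entry points are
// chosen inside arrayClear and mapClear.)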

// walkClose walks an OCLOSE node.
func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	return mkcall1(chanfn("closechan", 1, n.X.Type()), nil, init, n.X)
}

// walkCopy walks an OCOPY node.
//
// If the element type has pointers, copy is lowered to a call to
// typedslicecopy so that write barriers are applied. If runtimecall is set
// (used when instrumenting), it is lowered to a call to slicecopy, which
// also handles a string source. Otherwise it is open coded as roughly:
//
//	n := len(a)
//	if n > len(b) {
//		n = len(b)
//	}
//	if a.ptr != b.ptr {
//		memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
//	}
//	n
func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
	if n.X.Type().Elem().HasPointers() {
		ir.CurFunc.SetWBPos(n.Pos())
		fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)
		return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR)
	}

	if runtimecall {
		// Rely on the runtime to instrument:
		//	copy(n.X, n.Y)
		// n.Y can be a slice or a string.
		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)

		fn := typecheck.LookupRuntime("slicecopy", ptrL.Type().Elem(), ptrR.Type().Elem())

		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size()))
	}

	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)
	nl := typecheck.TempAt(base.Pos, ir.CurFunc, n.X.Type())
	nr := typecheck.TempAt(base.Pos, ir.CurFunc, n.Y.Type())
	var l []ir.Node
	l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
	l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))

	nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
	nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)

	nlen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])

	// n = len(to)
	l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))

	// if n > len(frm) { n = len(frm) }
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)

	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
	nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
	l = append(l, nif)

	// if to.ptr != frm.ptr { memmove(...) }
	ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
	ne.Likely = true
	l = append(l, ne)

	fn := typecheck.LookupRuntime("memmove", nl.Type().Elem(), nl.Type().Elem())
	nwid := ir.Node(typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR]))
	setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
	ne.Body.Append(setwid)
	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size()))
	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
	ne.Body.Append(call)

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return nlen
}

// walkDelete walks an ODELETE node.
func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
	init.Append(ir.TakeInit(n)...)
	map_ := n.Args[0]
	key := n.Args[1]
	map_ = walkExpr(map_, init)
	key = walkExpr(key, init)

	t := map_.Type()
	fast := mapfast(t)
	key = mapKeyArg(fast, n, key, false)
	return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key)
}
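
// As a rough sketch (the names below illustrate the mapfast selection; the
// exact runtime entry points depend on the map implementation in use):
//
//	delete(m, k) // 32-bit key  ->  mapdelete_fast32(maptype, m, k)
//	delete(m, k) // string key  ->  mapdelete_faststr(maptype, m, k)
//	delete(m, k) // otherwise   ->  mapdelete(maptype, m, &k)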

// walkLenCap walks an OLEN or OCAP node.
func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	if isRuneCount(n) {
		// Replace len([]rune(string)) with runtime.countrunes(string).
		return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
	}
	if isByteCount(n) {
		conv := n.X.(*ir.ConvExpr)
		walkStmtList(conv.Init())
		init.Append(ir.TakeInit(conv)...)
		_, len := backingArrayPtrLen(cheapExpr(conv.X, init))
		return len
	}
	if isChanLenCap(n) {
		name := "chanlen"
		if n.Op() == ir.OCAP {
			name = "chancap"
		}
		// Replace len(c)/cap(c) on a channel with a call to
		// runtime.chanlen/chancap.
		fn := typecheck.LookupRuntime(name, n.X.Type())
		return mkcall1(fn, n.Type(), init, n.X)
	}

	n.X = walkExpr(n.X, init)

	// Replace len or cap of an array (or pointer to an array) with the
	// constant array length. Delayed until now to preserve any side
	// effects in n.X.
	t := n.X.Type()
	if t.IsPtr() {
		t = t.Elem()
	}
	if t.IsArray() {
		// Evaluate n.X for its side effects and discard the result.
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.BlankNode, n.X))

		con := ir.NewConstExpr(constant.MakeInt64(t.NumElem()), n)
		con.SetTypecheck(1)
		return con
	}
	return n
}
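
// Rough examples of the special cases handled above (helper names as used in
// this file; exact runtime signatures are abbreviated):
//
//	len([]rune(s))    ->  countrunes(s)
//	len(string(b))    ->  len(b)                 // no conversion allocated
//	len(ch), cap(ch)  ->  chanlen(ch), chancap(ch)
//	len(p), cap(p)    ->  10                     // p has type *[10]T or [10]T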

// walkMakeChan walks an OMAKECHAN node.
func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	// When size fits into int, use makechan instead of
	// makechan64, which is faster and shorter on 32 bit platforms.
	size := n.Len
	fnname := "makechan64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL size is positive and fits in an int.
	// The case of size overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makechan at run time.
	if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makechan"
		argtype = types.Types[types.TINT]
	}

	return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype))
}
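
// Sketch of the resulting call (helper names as looked up above; exact
// runtime signatures abbreviated):
//
//	make(chan T, n)  ->  makechan(chantype, int(n))     // n fits in int
//	make(chan T, n)  ->  makechan64(chantype, int64(n)) // otherwise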

// walkMakeMap walks an OMAKEMAP node.
func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	if buildcfg.Experiment.SwissMap {
		return walkMakeSwissMap(n, init)
	}
	return walkMakeOldMap(n, init)
}

func walkMakeSwissMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	t := n.Type()
	mapType := reflectdata.SwissMapType()
	hint := n.Len

	// var m *Map
	var m ir.Node
	if n.Esc() == ir.EscNone {
		// The map does not escape: allocate the map header on the stack.
		//
		//	var mv Map
		//	m = &mv
		m = stackTempAddr(init, mapType)

		// If the hint is not known to be larger than SwissMapGroupSlots,
		// also allocate one group on the stack and point m.dirPtr at it.
		// For larger hints runtime.makemap allocates on the heap instead.
		if !ir.IsConst(hint, constant.Int) ||
			constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.SwissMapGroupSlots)) {

			// Generate:
			//
			//	if hint <= abi.SwissMapGroupSlots {
			//		var gv group
			//		g := &gv
			//		g.ctrl = abi.SwissMapCtrlEmpty
			//		m.dirPtr = g
			//	}
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, abi.SwissMapGroupSlots)), nil, nil)
			nif.Likely = true

			groupType := reflectdata.SwissMapGroupType(t)

			// var gv group
			// g := &gv
			g := stackTempAddr(&nif.Body, groupType)

			// SwissMapCtrlEmpty has the top bit set, so build it from a
			// uint64 rather than via ir.NewInt (which takes an int64).
			empty := ir.NewBasicLit(base.Pos, types.UntypedInt, constant.MakeUint64(abi.SwissMapCtrlEmpty))

			// g.ctrl = abi.SwissMapCtrlEmpty
			csym := groupType.Field(0).Sym
			ca := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, g, csym), empty)
			nif.Body.Append(ca)

			// m.dirPtr = g
			dsym := mapType.Field(2).Sym
			na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, m, dsym), typecheck.ConvNop(g, types.Types[types.TUNSAFEPTR]))
			nif.Body.Append(na)
			appendWalkStmt(init, nif)
		}
	}

	if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.SwissMapGroupSlots)) {
		// make(map[K]V) and make(map[K]V, hint) with a small constant hint
		// (hint <= abi.SwissMapGroupSlots) need no table allocation up
		// front, so they get a faster, smaller initialization sequence.
		if n.Esc() == ir.EscNone {
			// The map header was already allocated on the stack above;
			// the only remaining initialization is the seed:
			//
			//	m.seed = uintptr(rand())
			rand := mkcall("rand", types.Types[types.TUINT64], init)
			seedSym := mapType.Field(1).Sym
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, m, seedSym), typecheck.Conv(rand, types.Types[types.TUINTPTR])))
			return typecheck.ConvNop(m, t)
		}

		// Escaping small map: call runtime.makemap_small, which allocates
		// the map header on the heap and initializes its seed.
		fn := typecheck.LookupRuntime("makemap_small", t.Key(), t.Elem())
		return mkcall1(fn, n.Type(), init)
	}

	if n.Esc() != ir.EscNone {
		m = typecheck.NodNil()
	}

	// General case: a variable or large hint. Generate a call to
	// runtime.makemap to initialize the map and allocate its backing
	// storage, passing m (the stack-allocated header or nil).
	//
	// When hint fits into int, use makemap instead of makemap64,
	// which is faster and shorter on 32 bit platforms.
	fnname := "makemap64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL hint is positive and fits in an int.
	// The case of hint overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makemap at run time.
	if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makemap"
		argtype = types.Types[types.TINT]
	}

	fn := typecheck.LookupRuntime(fnname, mapType, t.Key(), t.Elem())
	return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), m)
}

func walkMakeOldMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	t := n.Type()
	hmapType := reflectdata.OldMapType()
	hint := n.Len

	// var h *hmap
	var h ir.Node
	if n.Esc() == ir.EscNone {
		// The map does not escape: allocate the hmap on the stack.
		//
		//	var hv hmap
		//	h = &hv
		h = stackTempAddr(init, hmapType)

		// If the hint is not known to be larger than the bucket size,
		// also allocate one bucket on the stack and point h.buckets at
		// it. For larger hints runtime.makemap allocates the buckets on
		// the heap instead.
		if !ir.IsConst(hint, constant.Int) ||
			constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.OldMapBucketCount)) {

			// Generate:
			//
			//	if hint <= abi.OldMapBucketCount {
			//		var bv bmap
			//		b := &bv
			//		h.buckets = b
			//	}
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, abi.OldMapBucketCount)), nil, nil)
			nif.Likely = true

			// var bv bmap
			// b := &bv
			b := stackTempAddr(&nif.Body, reflectdata.OldMapBucketType(t))

			// h.buckets = b
			bsym := hmapType.Field(5).Sym
			na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), typecheck.ConvNop(b, types.Types[types.TUNSAFEPTR]))
			nif.Body.Append(na)
			appendWalkStmt(init, nif)
		}
	}

	if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.OldMapBucketCount)) {
		// make(map[K]V) and make(map[K]V, hint) with a small constant hint
		// (hint <= abi.OldMapBucketCount) need no bucket allocation up
		// front, so they get a faster, smaller initialization sequence.
		if n.Esc() == ir.EscNone {
			// The hmap was already allocated on the stack above; the only
			// remaining initialization is the hash seed:
			//
			//	h.hash0 = rand32()
			rand := mkcall("rand32", types.Types[types.TUINT32], init)
			hashsym := hmapType.Field(4).Sym
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
			return typecheck.ConvNop(h, t)
		}

		// Escaping small map: call runtime.makemap_small, which allocates
		// the hmap on the heap and initializes its hash seed.
		fn := typecheck.LookupRuntime("makemap_small", t.Key(), t.Elem())
		return mkcall1(fn, n.Type(), init)
	}

	if n.Esc() != ir.EscNone {
		h = typecheck.NodNil()
	}

	// General case: a variable or large hint. Generate a call to
	// runtime.makemap to initialize the hmap and allocate the buckets,
	// passing h (the stack-allocated hmap or nil).
	//
	// When hint fits into int, use makemap instead of makemap64,
	// which is faster and shorter on 32 bit platforms.
	fnname := "makemap64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL hint is positive and fits in an int.
	// The case of hint overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makemap at run time.
	if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makemap"
		argtype = types.Types[types.TINT]
	}

	fn := typecheck.LookupRuntime(fnname, hmapType, t.Key(), t.Elem())
	return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), h)
}

// walkMakeSlice walks an OMAKESLICE node.
func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	len := n.Len
	cap := n.Cap
	len = safeExpr(len, init)
	if cap != nil {
		cap = safeExpr(cap, init)
	} else {
		cap = len
	}
	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}

	tryStack := false
	if n.Esc() == ir.EscNone {
		if why := escape.HeapAllocReason(n); why != "" {
			base.Fatalf("%v has EscNone, but %v", n, why)
		}
		if ir.IsSmallIntConst(cap) {
			// Small constant capacity: allocate the backing array on the
			// stack and slice it.
			cap := typecheck.IndexConst(cap)

			// Generate the length checks:
			//
			//	if uint64(len) > cap {
			//		if len < 0 { panicmakeslicelen() }
			//		panicmakeslicecap()
			//	}
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINT64]), ir.NewInt(base.Pos, cap)), nil, nil)
			niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, len, ir.NewInt(base.Pos, 0)), nil, nil)
			niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
			nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
			init.Append(typecheck.Stmt(nif))

			// var arr [cap]E
			// s := arr[:len]
			t := types.NewArray(t.Elem(), cap)
			arr := typecheck.TempAt(base.Pos, ir.CurFunc, t)
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, arr, nil))
			s := ir.NewSliceExpr(base.Pos, ir.OSLICE, arr, nil, len, nil)

			return walkExpr(typecheck.Expr(typecheck.Conv(s, n.Type())), init)
		}

		tryStack = base.Flag.N == 0 && base.VariableMakeHash.MatchPos(n.Pos(), nil)
	}

	// The result ends up in this temporary, so that both the
	// stack-allocation fast path below and the makeslice call can assign to it.
	slice := typecheck.TempAt(base.Pos, ir.CurFunc, n.Type())

	if tryStack {
		// Non-constant capacity, but the slice does not escape: try a
		// stack allocation guarded by a size check. Roughly:
		//
		//	K := maxStackSize / sizeof(E)
		//	if cap <= K {
		//		var arr [K]E
		//		slice = arr[:len:cap]
		//	} else {
		//		slice = makeslice(E, len, cap)
		//	}
		maxStackSize := int64(base.Debug.VariableMakeThreshold)
		K := maxStackSize / t.Elem().Size()
		if K > 0 {
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(cap, types.Types[types.TUINT64]), ir.NewInt(base.Pos, K)), nil, nil)

			// cap is in bounds here, but len still needs checking:
			//
			//	if uint64(len) > uint64(cap) {
			//		if len < 0 { panicmakeslicelen() }
			//		panicmakeslicecap()
			//	}
			lenCap := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINT64]), typecheck.Conv(cap, types.Types[types.TUINT64])), nil, nil)
			lenZero := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, len, ir.NewInt(base.Pos, 0)), nil, nil)
			lenZero.Body.Append(mkcall("panicmakeslicelen", nil, &lenZero.Body))
			lenCap.Body.Append(lenZero)
			lenCap.Body.Append(mkcall("panicmakeslicecap", nil, &lenCap.Body))
			nif.Body.Append(lenCap)

			t := types.NewArray(t.Elem(), K)
			arr := typecheck.TempAt(base.Pos, ir.CurFunc, t)
			nif.Body.Append(ir.NewAssignStmt(base.Pos, arr, nil))
			s := ir.NewSliceExpr(base.Pos, ir.OSLICE, arr, nil, len, cap)
			nif.Body.Append(ir.NewAssignStmt(base.Pos, slice, s))

			appendWalkStmt(init, typecheck.Stmt(nif))

			// The heap-allocation fallback below goes into the else branch.
			init = &nif.Else
		}
	}

	// Call makeslice (or makeslice64) and build the slice header from the
	// returned pointer.
	//
	// When len and cap can fit into int, use makeslice instead of
	// makeslice64, which is faster and shorter on 32 bit platforms.
	fnname := "makeslice64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makeslice at run time.
	if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) &&
		(cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) {
		fnname = "makeslice"
		argtype = types.Types[types.TINT]
	}
	fn := typecheck.LookupRuntime(fnname)
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
	ptr.MarkNonNil()
	len = typecheck.Conv(len, types.Types[types.TINT])
	cap = typecheck.Conv(cap, types.Types[types.TINT])
	s := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap)
	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, slice, s))

	return slice
}

// walkMakeSliceCopy walks an OMAKESLICECOPY node.
func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	if n.Esc() == ir.EscNone {
		base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
	}

	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}

	length := typecheck.Conv(n.Len, types.Types[types.TINT])
	copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
	copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)

	if !t.Elem().HasPointers() && n.Bounded() {
		// When len(to) == len(from) and the elements have no pointers,
		// replace make+copy with runtime.mallocgc + runtime.memmove.
		//
		// We do not check for overflow of len(to)*elem.Size here, since
		// len(from) is an existing, checked slice capacity with the same
		// element size.
		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR]))

		// instantiate mallocgc(size uintptr, typ *byte, needzero bool) unsafe.Pointer
		fn := typecheck.LookupRuntime("mallocgc")
		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false))
		ptr.MarkNonNil()
		sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)

		s := typecheck.TempAt(base.Pos, ir.CurFunc, t)
		r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
		r = walkExpr(r, init)
		init.Append(r)

		// instantiate memmove(to *any, frm *any, size uintptr)
		fn = typecheck.LookupRuntime("memmove", t.Elem(), t.Elem())
		ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
		init.Append(walkExpr(typecheck.Stmt(ncopy), init))

		return s
	}

	// General case: replace make+copy with runtime.makeslicecopy.
	// instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
	fn := typecheck.LookupRuntime("makeslicecopy")
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
	ptr.MarkNonNil()
	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
	return walkExpr(typecheck.Expr(sh), init)
}
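
// Rough sketch of the two lowerings above for s := make([]T, len(from)); copy(s, from):
//
//	// no pointers in T, lengths known equal:
//	p := mallocgc(len*sizeof(T), nil, false)
//	s := sliceheader{p, len, len}
//	memmove(s.ptr, from.ptr, len*sizeof(T))
//
//	// general case:
//	p := makeslicecopy(T, len, len(from), from.ptr)
//	s := sliceheader{p, len, len}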

// walkNew walks an ONEW node.
func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	t := n.Type().Elem()
	if t.NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem())
	}
	if n.Esc() == ir.EscNone {
		if t.Size() > ir.MaxImplicitStackVarSize {
			base.Fatalf("large ONEW with EscNone: %v", n)
		}
		return stackTempAddr(init, t)
	}
	types.CalcSize(t)
	n.MarkNonNil()
	return n
}
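
// In other words (sketch): a non-escaping new(T) becomes the address of a
// stack temporary,
//
//	var tmp T
//	p := &tmp
//
// while an escaping new(T) is left as an ONEW node for later lowering to a
// heap allocation.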

// walkMinMax walks an OMIN or OMAX node.
func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	init.Append(ir.TakeInit(n)...)
	walkExprList(n.Args, init)
	return n
}

// walkPrint walks an OPRINT or OPRINTLN node.
func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	// Hoist all the argument evaluation up before the lock.
	walkExprListCheap(nn.Args, init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op() == ir.OPRINTLN {
		s := nn.Args
		t := make([]ir.Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, ir.NewString(base.Pos, " "))
			}
			t = append(t, n)
		}
		t = append(t, ir.NewString(base.Pos, "\n"))
		nn.Args = t
	}

	// Collapse runs of constant strings.
	s := nn.Args
	t := make([]ir.Node, 0, len(s))
	for i := 0; i < len(s); {
		var strs []string
		for i < len(s) && ir.IsConst(s[i], constant.String) {
			strs = append(strs, ir.StringVal(s[i]))
			i++
		}
		if len(strs) > 0 {
			t = append(t, ir.NewString(base.Pos, strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.Args = t

	calls := []ir.Node{mkcall("printlock", nil, init)}
	for i, n := range nn.Args {
		if n.Op() == ir.OLITERAL {
			if n.Type() == types.UntypedRune {
				n = typecheck.DefaultLit(n, types.RuneType)
			}

			switch n.Val().Kind() {
			case constant.Int:
				n = typecheck.DefaultLit(n, types.Types[types.TINT64])

			case constant.Float:
				n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
			}
		}

		if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
			n = typecheck.DefaultLit(n, types.Types[types.TINT64])
		}
		n = typecheck.DefaultLit(n, nil)
		nn.Args[i] = n
		if n.Type() == nil || n.Type().Kind() == types.TFORW {
			continue
		}

		var on *ir.Name
		switch n.Type().Kind() {
		case types.TINTER:
			if n.Type().IsEmptyInterface() {
				on = typecheck.LookupRuntime("printeface", n.Type())
			} else {
				on = typecheck.LookupRuntime("printiface", n.Type())
			}
		case types.TPTR:
			if n.Type().Elem().NotInHeap() {
				on = typecheck.LookupRuntime("printuintptr")
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUNSAFEPTR])
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUINTPTR])
				break
			}
			fallthrough
		case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
			on = typecheck.LookupRuntime("printpointer", n.Type())
		case types.TSLICE:
			on = typecheck.LookupRuntime("printslice", n.Type())
		case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
			if types.RuntimeSymName(n.Type().Sym()) == "hex" {
				on = typecheck.LookupRuntime("printhex")
			} else {
				on = typecheck.LookupRuntime("printuint")
			}
		case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
			on = typecheck.LookupRuntime("printint")
		case types.TFLOAT32, types.TFLOAT64:
			on = typecheck.LookupRuntime("printfloat")
		case types.TCOMPLEX64, types.TCOMPLEX128:
			on = typecheck.LookupRuntime("printcomplex")
		case types.TBOOL:
			on = typecheck.LookupRuntime("printbool")
		case types.TSTRING:
			cs := ""
			if ir.IsConst(n, constant.String) {
				cs = ir.StringVal(n)
			}
			switch cs {
			case " ":
				on = typecheck.LookupRuntime("printsp")
			case "\n":
				on = typecheck.LookupRuntime("printnl")
			default:
				on = typecheck.LookupRuntime("printstring")
			}
		default:
			badtype(ir.OPRINT, n.Type(), nil)
			continue
		}

		r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil)
		if params := on.Type().Params(); len(params) > 0 {
			t := params[0].Type
			n = typecheck.Conv(n, t)
			r.Args.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheck.Stmts(calls)
	walkExprList(calls, init)

	r := ir.NewBlockStmt(base.Pos, nil)
	r.List = calls
	return walkStmt(typecheck.Stmt(r))
}
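
// For example, a sketch of the generated call sequence:
//
//	println("x =", x) // x an int
//
// becomes roughly
//
//	printlock()
//	printstring("x = ")
//	printint(int64(x))
//	printnl()
//	printunlock()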

// walkRecoverFP walks an ORECOVERFP node.
func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init))
}

// walkUnsafeData lowers unsafe.SliceData and unsafe.StringData to a pointer
// extraction (OSPTR) from the operand.
func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	slice := walkExpr(n.X, init)
	res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice))
	res.SetType(n.Type())
	return walkExpr(res, init)
}

func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)
	sliceType := n.Type()

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr is enabled, call runtime.unsafeslicecheckptr to validate
	// the pointer and length; it always takes an int64 length.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafeslicecheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Otherwise, open code the checks to avoid the overhead of a
		// runtime call.
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//	panicunsafeslicelen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafeslicelen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
		nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		if sliceType.Elem().Size() == 0 {
			// Zero-sized elements need no size/overflow check, only:
			//
			//	if ptr == nil && len > 0 { panicunsafeslicenilptr() }
			nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
			isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
			gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
			nifPtr.Cond =
				ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
			nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
			appendWalkStmt(init, nifPtr)

			h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
				typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
				typecheck.Conv(len, types.Types[types.TINT]),
				typecheck.Conv(len, types.Types[types.TINT]))
			return walkExpr(typecheck.Expr(h), init)
		}

		// mem, overflow := math.MulUintptr(elemSize, uintptr(len))
		mem := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
		overflow := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])

		decl := types.NewSignature(nil,
			[]*types.Field{
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
			},
			[]*types.Field{
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
				types.NewField(base.Pos, nil, types.Types[types.TBOOL]),
			})

		fn := ir.NewFunc(n.Pos(), n.Pos(), math_MulUintptr, decl)

		call := mkcall1(fn.Nname, fn.Type().ResultsTuple(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
		appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))

		// if overflow || mem > -uintptr(ptr) {
		//	if ptr == nil {
		//		panicunsafeslicenilptr()
		//	}
		//	panicunsafeslicelen()
		// }
		nif = ir.NewIfStmt(base.Pos, nil, nil, nil)
		memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond)
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
		nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)
	}

	h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
		typecheck.Conv(len, types.Types[types.TINT]))
	return walkExpr(typecheck.Expr(h), init)
}

var math_MulUintptr = &types.Sym{Pkg: types.NewPkg("internal/runtime/math", "math"), Name: "MulUintptr"}

func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr is enabled, call runtime.unsafestringcheckptr to validate
	// the pointer and length; it always takes an int64 length.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafestringcheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Otherwise, open code the checks to avoid the overhead of a
		// runtime call.
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//	panicunsafestringlen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafestringlen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
		nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		// A string's bytes are one byte each, so the check that the region
		// fits in the address space reduces to:
		//
		//	if uintptr(len) > -uintptr(ptr) {
		//		if ptr == nil {
		//			panicunsafestringnilptr()
		//		}
		//		panicunsafestringlen()
		//	}
		nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body))
		nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body))
		appendWalkStmt(init, nifLen)
	}
	h := ir.NewStringHeaderExpr(n.Pos(),
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
	)
	return walkExpr(typecheck.Expr(h), init)
}

func badtype(op ir.Op, tl, tr *types.Type) {
	var s string
	if tl != nil {
		s += fmt.Sprintf("\n\t%v", tl)
	}
	if tr != nil {
		s += fmt.Sprintf("\n\t%v", tr)
	}

	// common mistake: *struct vs *interface.
	if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
		if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
			s += "\n\t(*struct vs *interface)"
		} else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
			s += "\n\t(*interface vs *struct)"
		}
	}

	base.Errorf("illegal types for operand: %v%s", op, s)
}

func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
	return typecheck.LookupRuntime(name, l, r)
}

// isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes.
func isRuneCount(n ir.Node) bool {
	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
}

// isByteCount reports whether n is of the form len(string([]byte)).
func isByteCount(n ir.Node) bool {
	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN &&
		(n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STR || n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STRTMP)
}

// isChanLenCap reports whether n is of the form len(c) or cap(c) for a
// channel c.
func isChanLenCap(n ir.Node) bool {
	return (n.Op() == ir.OLEN || n.Op() == ir.OCAP) && n.(*ir.UnaryExpr).X.Type().IsChan()
}