// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"sort"
)

// sizeCalculationDisabled, when set, means it is no longer safe
// to calculate Types' widths and alignments. See dowidth.
var sizeCalculationDisabled bool

// machine size and rounding alignment are dictated by
// the size of a pointer, set in betypeinit (see ../amd64/galign.go).
var defercalc int

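// Rnd rounds o up to a multiple of r; r must be a power of two
// between 1 and 8. For example, Rnd(13, 8) == 16 and Rnd(16, 8) == 16.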
func Rnd(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		Fatalf("rnd %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
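// For example (a hypothetical declaration), an interface written as
//
//	type ReadCloser interface {
//		io.Reader
//		Close() error
//	}
//
// ends up with the flattened, sorted method set {Close, Read}.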
func expandiface(t *types.Type) {
	var fields []*types.Field
	for _, m := range t.Methods().Slice() {
		if m.Sym != nil {
			fields = append(fields, m)
			checkwidth(m.Type)
			continue
		}

		if !m.Type.IsInterface() {
			yyerrorl(asNode(m.Nname).Pos, "interface contains embedded non-interface %v", m.Type)
			m.SetBroke(true)
			t.SetBroke(true)
			// Add to fields so that error messages
			// include the broken embedded type when
			// printing t.
			// TODO(mdempsky): Revisit this.
			fields = append(fields, m)
			continue
		}

		// Embedded interface: duplicate all methods
		// (including broken ones, if any) and add to t's
		// method set.
		for _, t1 := range m.Type.Fields().Slice() {
			f := types.NewField()
			f.Type = t1.Type
			f.SetBroke(t1.Broke())
			f.Sym = t1.Sym
			f.Nname = m.Nname // preserve embedding position
			fields = append(fields, f)
		}
	}
	sort.Sort(methcmp(fields))

	// Access fields directly to avoid recursively calling dowidth
	// within Type.Fields().
	t.Extra.(*types.Interface).Fields.Set(fields)
}

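// offmod assigns consecutive, pointer-sized offsets to the methods of
// interface type t, reporting an error if the resulting table would
// exceed the architecture's maximum width.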
func offmod(t *types.Type) {
	o := int32(0)
	for _, f := range t.Fields().Slice() {
		f.Offset = int64(o)
		o += int32(Widthptr)
		if int64(o) >= thearch.MAXWIDTH {
			yyerror("interface too large")
			o = int32(Widthptr)
		}
	}
}

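// widstruct lays out the fields of struct type t, starting at offset o,
// and returns the offset just past the laid-out fields (rounded up to
// the struct's alignment when flag is nonzero). errtype is the type
// named in "too large" errors. flag seeds the minimum alignment
// (treated as at least 1); a nonzero flag rounds the final width up to
// the resulting alignment, and flag == 1 additionally pads after a
// trailing zero-sized field (ordinary struct layout). Function argument
// frames pass Widthreg instead.
//
// For example, assuming a 64-bit target where int64 has alignment 8,
// the fields of struct{ a int8; b int32; c int64 } land at offsets
// 0, 4, and 8, giving a width of 16 and an alignment of 8.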
func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if n := asNode(f.Nname); n != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
			if n.Name.Param.Stackcopy != nil {
				n.Name.Param.Stackcopy.Xoffset = o
				n.Xoffset = 0
			} else {
				n.Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
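	// For example, struct{ x int32; y struct{} } is given width 8 rather
	// than 4, so &y can never point one past the end of the allocation.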
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}

// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set, and the size/alignment
// have not already been calculated, it calls Fatalf.
// This is used to prevent data races in the back end.
func dowidth(t *types.Type) {
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
		}

		t.Width = 0
		return
	}

	if t.WidthCalculated() {
		return
	}

	if sizeCalculationDisabled {
		if t.Broke() {
			// break infinite recursion from Fatal call below
			return
		}
		t.SetBroke(true)
		Fatalf("width not calculated: %v", t)
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercalc++

	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0

	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	w := int64(0)
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR32:
		w = 4
		checkwidth(t.Elem())

	case TPTR64:
		w = 8
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
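		// The two words are the type/itab pointer and the data pointer.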
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
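		// (The TCHANARGS case below performs the element size check
		// once the element type's width is known.)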
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Val())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		if t.IsDDDArray() {
			if !t.Broke() {
				yyerror("use of [...] array outside of array literal")
				t.SetBroke(true)
			}
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Array)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
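	// (The widths are computed in the TFUNCARGS case below.)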
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// function is 3 concatenated structures;
	// compute their widths as side-effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		if w > 8 || w&(w-1) != 0 || w == 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	if t.Etype == TINTER {
		// We defer calling these functions until after
		// setting t.Width and t.Align so the recursive calls
		// to dowidth within t.Fields() will succeed.
		checkdupfields("method", t)
		offmod(t)
	}

	lineno = lno

	if defercalc == 1 {
		resumecheckwidth()
	} else {
		defercalc--
	}
}

// when a type's width should be known, we call checkwidth
// to compute it.  during a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// similarly, during import processing structs may be used
// before their definition.  in those situations, calling
// defercheckwidth() stops width calculations until
// resumecheckwidth() is called, at which point all the
// checkwidths that were deferred are executed.
// dowidth should only be called when the type's size
// is needed immediately.  checkwidth makes sure the
// size is evaluated eventually.
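//
// A minimal usage sketch (hypothetical caller, not code from this file):
//
//	defercheckwidth()
//	// ... declare or import types, calling checkwidth(t) as they appear ...
//	resumecheckwidth() // runs dowidth on every type deferred above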

var deferredTypeStack []*types.Type

func checkwidth(t *types.Type) {
	if t == nil {
		return
	}

	// function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		Fatalf("checkwidth %v", t)
	}

	if defercalc == 0 {
		dowidth(t)
		return
	}

	if t.Deferwidth() {
		return
	}
	t.SetDeferwidth(true)

	deferredTypeStack = append(deferredTypeStack, t)
}

func defercheckwidth() {
	// we get out of sync on syntax errors, so don't be pedantic.
	if defercalc != 0 && nerrors == 0 {
		Fatalf("defercheckwidth")
	}
	defercalc = 1
}

func resumecheckwidth() {
	if defercalc == 0 {
		Fatalf("resumecheckwidth")
	}
	for len(deferredTypeStack) > 0 {
		t := deferredTypeStack[len(deferredTypeStack)-1]
		deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
		t.SetDeferwidth(false)
		dowidth(t)
	}

	defercalc = 0
}