/*-------------------------------------------------------------------------
 *
 * llvmjit_deform.c
 *	  Generate code for deforming a heap tuple.
 *
 * This gains performance benefits over unJITed deforming from compile-time
 * knowledge of the tuple descriptor. Fixed column widths, NOT NULLness, etc.
 * can be taken advantage of.
 *
 * Portions Copyright (c) 1996-2020, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * IDENTIFICATION
 *	  src/backend/jit/llvm/llvmjit_deform.c
 *
 *-------------------------------------------------------------------------
 */

#include "postgres.h"

#include <llvm-c/Core.h>

#include "access/htup_details.h"
#include "access/tupdesc_details.h"
#include "executor/tuptable.h"
#include "jit/llvmjit.h"
#include "jit/llvmjit_emit.h"


/*
 * Create a function that deforms a tuple of type desc up to natts columns.
 */
LLVMValueRef
slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
					const TupleTableSlotOps *ops, int natts)
{
	char	   *funcname;

	LLVMModuleRef mod;
	LLVMBuilderRef b;

	LLVMTypeRef deform_sig;
	LLVMValueRef v_deform_fn;

	LLVMBasicBlockRef b_entry;
	LLVMBasicBlockRef b_adjust_unavail_cols;
	LLVMBasicBlockRef b_find_start;

	LLVMBasicBlockRef b_out;
	LLVMBasicBlockRef b_dead;
	LLVMBasicBlockRef *attcheckattnoblocks;
	LLVMBasicBlockRef *attstartblocks;
	LLVMBasicBlockRef *attisnullblocks;
	LLVMBasicBlockRef *attcheckalignblocks;
	LLVMBasicBlockRef *attalignblocks;
	LLVMBasicBlockRef *attstoreblocks;

	LLVMValueRef v_offp;

	LLVMValueRef v_tupdata_base;
	LLVMValueRef v_tts_values;
	LLVMValueRef v_tts_nulls;
	LLVMValueRef v_slotoffp;
	LLVMValueRef v_flagsp;
	LLVMValueRef v_nvalidp;
	LLVMValueRef v_nvalid;
	LLVMValueRef v_maxatt;

	LLVMValueRef v_slot;

	LLVMValueRef v_tupleheaderp;
	LLVMValueRef v_tuplep;
	LLVMValueRef v_infomask1;
	LLVMValueRef v_infomask2;
	LLVMValueRef v_bits;

	LLVMValueRef v_hoff;

	LLVMValueRef v_hasnulls;

	/* last column (0 indexed) guaranteed to exist */
	int			guaranteed_column_number = -1;

	/* current known alignment */
	int			known_alignment = 0;

	/* if true, known_alignment describes definite offset of column */
	bool		attguaranteedalign = true;

	int			attnum;

	/* virtual tuples never need deforming, so don't generate code */
	if (ops == &TTSOpsVirtual)
		return NULL;

	/* decline to JIT for slot types we don't know how to handle */
	if (ops != &TTSOpsHeapTuple && ops != &TTSOpsBufferHeapTuple &&
		ops != &TTSOpsMinimalTuple)
		return NULL;

	mod = llvm_mutable_module(context);

	funcname = llvm_expand_funcname(context, "deform");

	/*
	 * Check which columns have to exist, so we don't have to check the row's
	 * natts unnecessarily.
	 */
	for (attnum = 0; attnum < desc->natts; attnum++)
	{
		Form_pg_attribute att = TupleDescAttr(desc, attnum);

		/*
		 * If the column is declared NOT NULL then it must be present in every
		 * tuple, unless there's a "missing" entry that could provide a
		 * non-NULL value for it. That in turn guarantees that the NULL bitmap
		 * - if there are any NULLable columns - is at least long enough to
		 * cover columns up to attnum.
		 *
		 * Be paranoid and also check !attisdropped, even though the
		 * combination of attisdropped && attnotnull shouldn't exist.
		 */
		if (att->attnotnull &&
			!att->atthasmissing &&
			!att->attisdropped)
			guaranteed_column_number = attnum;
	}
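
	/*
	 * Illustration (hypothetical descriptor, not taken from anywhere in
	 * particular): for columns (a int4 NOT NULL, b text, c int4 NOT NULL,
	 * d text), guaranteed_column_number ends up as 2 (column "c"): every
	 * tuple of that shape must physically store columns 0..2, so the
	 * generated code can skip the checks against the tuple's natts for
	 * those attributes.
	 */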

	/* Create the signature and function */
	{
		LLVMTypeRef param_types[1];

		param_types[0] = l_ptr(StructTupleTableSlot);

		deform_sig = LLVMFunctionType(LLVMVoidType(), param_types,
									  lengthof(param_types), 0);
	}
	v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
	LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
	LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
	llvm_copy_attributes(AttributeTemplate, v_deform_fn);
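
	/*
	 * The function created here corresponds to a C function of the shape
	 *		void deform(TupleTableSlot *slot);
	 * it communicates its results solely through the slot (tts_values,
	 * tts_isnull, tts_nvalid, etc.).
	 */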

	b_entry =
		LLVMAppendBasicBlock(v_deform_fn, "entry");
	b_adjust_unavail_cols =
		LLVMAppendBasicBlock(v_deform_fn, "adjust_unavail_cols");
	b_find_start =
		LLVMAppendBasicBlock(v_deform_fn, "find_startblock");
	b_out =
		LLVMAppendBasicBlock(v_deform_fn, "outblock");
	b_dead =
		LLVMAppendBasicBlock(v_deform_fn, "deadblock");

	b = LLVMCreateBuilder();

	attcheckattnoblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
	attstartblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
	attisnullblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
	attcheckalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
	attalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
	attstoreblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);

	known_alignment = 0;

	LLVMPositionBuilderAtEnd(b, b_entry);

	/* perform allocas first, llvm only converts those to registers */
	v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");

	v_slot = LLVMGetParam(v_deform_fn, 0);

	v_tts_values =
		l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_VALUES,
						  "tts_values");
	v_tts_nulls =
		l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_ISNULL,
						  "tts_ISNULL");
	v_flagsp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_FLAGS, "");
	v_nvalidp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_NVALID, "");

	if (ops == &TTSOpsHeapTuple || ops == &TTSOpsBufferHeapTuple)
	{
		LLVMValueRef v_heapslot;

		v_heapslot =
			LLVMBuildBitCast(b,
							 v_slot,
							 l_ptr(StructHeapTupleTableSlot),
							 "heapslot");
		v_slotoffp = LLVMBuildStructGEP(b, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_OFF, "");
		v_tupleheaderp =
			l_load_struct_gep(b, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_TUPLE,
							  "tupleheader");
	}
	else if (ops == &TTSOpsMinimalTuple)
	{
		LLVMValueRef v_minimalslot;

		v_minimalslot =
			LLVMBuildBitCast(b,
							 v_slot,
							 l_ptr(StructMinimalTupleTableSlot),
							 "minimalslot");
		v_slotoffp = LLVMBuildStructGEP(b, v_minimalslot, FIELDNO_MINIMALTUPLETABLESLOT_OFF, "");
		v_tupleheaderp =
			l_load_struct_gep(b, v_minimalslot, FIELDNO_MINIMALTUPLETABLESLOT_TUPLE,
							  "tupleheader");
	}
	else
	{
		/* should've returned at the start of the function */
		pg_unreachable();
	}

	v_tuplep =
		l_load_struct_gep(b, v_tupleheaderp, FIELDNO_HEAPTUPLEDATA_DATA,
						  "tuple");
	v_bits =
		LLVMBuildBitCast(b,
						 LLVMBuildStructGEP(b, v_tuplep,
											FIELDNO_HEAPTUPLEHEADERDATA_BITS,
											""),
						 l_ptr(LLVMInt8Type()),
						 "t_bits");
	v_infomask1 =
		l_load_struct_gep(b, v_tuplep,
						  FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK,
						  "infomask1");
	v_infomask2 =
		l_load_struct_gep(b,
						  v_tuplep, FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2,
						  "infomask2");

	/* t_infomask & HEAP_HASNULL */
	v_hasnulls =
		LLVMBuildICmp(b, LLVMIntNE,
					  LLVMBuildAnd(b,
								   l_int16_const(HEAP_HASNULL),
								   v_infomask1, ""),
					  l_int16_const(0),
					  "hasnulls");

	/* t_infomask2 & HEAP_NATTS_MASK */
	v_maxatt = LLVMBuildAnd(b,
							l_int16_const(HEAP_NATTS_MASK),
							v_infomask2,
							"maxatt");
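
	/*
	 * In plain C the two computations above are roughly (cf.
	 * HeapTupleHeaderGetNatts()):
	 *		hasnulls = (tup->t_infomask & HEAP_HASNULL) != 0;
	 *		maxatt = tup->t_infomask2 & HEAP_NATTS_MASK;
	 */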

	/*
	 * Need to zext, as getelementptr otherwise treats hoff as a signed 8bit
	 * integer, which'd yield a negative offset for t_hoff > 127.
	 */
	v_hoff =
		LLVMBuildZExt(b,
					  l_load_struct_gep(b, v_tuplep,
										FIELDNO_HEAPTUPLEHEADERDATA_HOFF,
										""),
					  LLVMInt32Type(), "t_hoff");

	v_tupdata_base =
		LLVMBuildGEP(b,
					 LLVMBuildBitCast(b,
									  v_tuplep,
									  l_ptr(LLVMInt8Type()),
									  ""),
					 &v_hoff, 1,
					 "v_tupdata_base");
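
	/*
	 * v_tupdata_base is the generated equivalent of
	 *		(char *) tup + tup->t_hoff
	 * i.e. a pointer to the start of the tuple's data area.
	 */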

	/*
	 * Load tuple start offset from slot. Will be reset below in case there
	 * are no existing deformed columns in the slot.
	 */
	{
		LLVMValueRef v_off_start;

		v_off_start = LLVMBuildLoad(b, v_slotoffp, "v_slot_off");
		v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
		LLVMBuildStore(b, v_off_start, v_offp);
	}

	/* build the basic blocks for each attribute, we need them as jump targets */
	for (attnum = 0; attnum < natts; attnum++)
	{
		attcheckattnoblocks[attnum] =
			l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
		attstartblocks[attnum] =
			l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
		attisnullblocks[attnum] =
			l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
		attcheckalignblocks[attnum] =
			l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
		attalignblocks[attnum] =
			l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
		attstoreblocks[attnum] =
			l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
	}
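
	/*
	 * Per attribute, control roughly flows
	 *		attcheckattno -> start -> attcheckalign -> [align] -> store
	 * with a detour from "start" to "attisnull" for columns that turn out
	 * to be NULL (those store a NULL and jump straight to the next
	 * attribute), and early exits to the out block once the tuple has no
	 * further attributes.
	 */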

	/*
	 * Check if it is guaranteed that all the desired attributes are available
	 * in the tuple (but still possibly NULL), by dint of either the last
	 * to-be-deformed column being NOT NULL, or subsequent ones not accessed
	 * here being NOT NULL.  If that's not guaranteed, the tuple header's
	 * natts has to be checked, and missing attributes potentially have to be
	 * fetched (using slot_getmissingattrs()).
	 */
	if ((natts - 1) <= guaranteed_column_number)
	{
		/* just skip through unnecessary blocks */
		LLVMBuildBr(b, b_adjust_unavail_cols);
		LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
		LLVMBuildBr(b, b_find_start);
	}
	else
	{
		LLVMValueRef v_params[3];

		/* branch if not all columns available */
		LLVMBuildCondBr(b,
						LLVMBuildICmp(b, LLVMIntULT,
									  v_maxatt,
									  l_int16_const(natts),
									  ""),
						b_adjust_unavail_cols,
						b_find_start);

		/* if not all available, fill the missing columns' tts_values/tts_isnull */
		LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);

		v_params[0] = v_slot;
		v_params[1] = LLVMBuildZExt(b, v_maxatt, LLVMInt32Type(), "");
		v_params[2] = l_int32_const(natts);
		LLVMBuildCall(b, llvm_pg_func(mod, "slot_getmissingattrs"),
					  v_params, lengthof(v_params), "");
		LLVMBuildBr(b, b_find_start);
	}
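
	/*
	 * The call emitted above is the generated counterpart of
	 *		slot_getmissingattrs(slot, maxatt, natts);
	 * which fills tts_values/tts_isnull for attributes not physically
	 * present in the tuple, from the descriptor's "missing" defaults or
	 * with NULLs.
	 */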

	LLVMPositionBuilderAtEnd(b, b_find_start);

	v_nvalid = LLVMBuildLoad(b, v_nvalidp, "");

	/*
	 * Build switch to go from nvalid to the right startblock.  Callers
	 * currently don't have the knowledge, but it'd be good for performance to
	 * avoid this check when it's known that the slot is empty (e.g. in scan
	 * nodes).
	 */
	if (true)
	{
		LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
												b_dead, natts);

		for (attnum = 0; attnum < natts; attnum++)
		{
			LLVMValueRef v_attno = l_int16_const(attnum);

			LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
		}
	}
	else
	{
		/* jump from entry block to first block */
		LLVMBuildBr(b, attcheckattnoblocks[0]);
	}

	LLVMPositionBuilderAtEnd(b, b_dead);
	LLVMBuildUnreachable(b);

	/*
	 * Iterate over each attribute that needs to be deformed, and build code
	 * to deform it.
	 */
	for (attnum = 0; attnum < natts; attnum++)
	{
		Form_pg_attribute att = TupleDescAttr(desc, attnum);
		LLVMValueRef v_incby;
		int			alignto;
		LLVMValueRef l_attno = l_int16_const(attnum);
		LLVMValueRef v_attdatap;
		LLVMValueRef v_resultp;

		/* build block checking whether we did all the necessary attributes */
		LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);

		/*
		 * If this is the first attribute, slot->tts_nvalid was 0. Therefore
		 * also reset the offset to 0; it may be left over from a previous
		 * execution.
		 */
		if (attnum == 0)
		{
			LLVMBuildStore(b, l_sizet_const(0), v_offp);
		}

		/*
		 * Build check whether column is available (i.e. whether the tuple has
		 * that many columns stored). We can avoid the branch if we know
		 * there's a subsequent NOT NULL column.
		 */
		if (attnum <= guaranteed_column_number)
		{
			LLVMBuildBr(b, attstartblocks[attnum]);
		}
		else
		{
			LLVMValueRef v_islast;

			v_islast = LLVMBuildICmp(b, LLVMIntUGE,
									 l_attno,
									 v_maxatt,
									 "heap_natts");
			LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
		}
		LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);

		/*
		 * Check for nulls if necessary. No need to take missing attributes
		 * into account, because if they're present the heaptuple's natts
		 * would have indicated that a slot_getmissingattrs() is needed.
		 */
		if (!att->attnotnull)
		{
			LLVMBasicBlockRef b_ifnotnull;
			LLVMBasicBlockRef b_ifnull;
			LLVMBasicBlockRef b_next;
			LLVMValueRef v_attisnull;
			LLVMValueRef v_nullbyteno;
			LLVMValueRef v_nullbytemask;
			LLVMValueRef v_nullbyte;
			LLVMValueRef v_nullbit;

			b_ifnotnull = attcheckalignblocks[attnum];
			b_ifnull = attisnullblocks[attnum];

			if (attnum + 1 == natts)
				b_next = b_out;
			else
				b_next = attcheckattnoblocks[attnum + 1];

			v_nullbyteno = l_int32_const(attnum >> 3);
			v_nullbytemask = l_int8_const(1 << ((attnum) & 0x07));
			v_nullbyte = l_load_gep1(b, v_bits, v_nullbyteno, "attnullbyte");

			v_nullbit = LLVMBuildICmp(b,
									  LLVMIntEQ,
									  LLVMBuildAnd(b, v_nullbyte, v_nullbytemask, ""),
									  l_int8_const(0),
									  "attisnull");

			v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");
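
			/*
			 * The bit test just built mirrors att_isnull() from
			 * access/tupmacs.h,
			 *		!((BITS)[(ATT) >> 3] & (1 << ((ATT) & 0x07)))
			 * ANDed with the tuple-level HEAP_HASNULL test computed
			 * earlier.
			 */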

			LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);

			LLVMPositionBuilderAtEnd(b, b_ifnull);

			/* store null-byte */
			LLVMBuildStore(b,
						   l_int8_const(1),
						   LLVMBuildGEP(b, v_tts_nulls, &l_attno, 1, ""));
			/* store zero datum */
			LLVMBuildStore(b,
						   l_sizet_const(0),
						   LLVMBuildGEP(b, v_tts_values, &l_attno, 1, ""));

			LLVMBuildBr(b, b_next);
			attguaranteedalign = false;
		}
		else
		{
			/* nothing to do */
			LLVMBuildBr(b, attcheckalignblocks[attnum]);
			LLVMPositionBuilderAtEnd(b, attisnullblocks[attnum]);
			LLVMBuildBr(b, attcheckalignblocks[attnum]);
		}
		LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);

		/* determine required alignment */
		if (att->attalign == TYPALIGN_INT)
			alignto = ALIGNOF_INT;
		else if (att->attalign == TYPALIGN_CHAR)
			alignto = 1;
		else if (att->attalign == TYPALIGN_DOUBLE)
			alignto = ALIGNOF_DOUBLE;
		else if (att->attalign == TYPALIGN_SHORT)
			alignto = ALIGNOF_SHORT;
		else
		{
			elog(ERROR, "unknown alignment");
			alignto = 0;
		}

		/* ------
		 * Even if alignment is required, we can skip doing it if provably
		 * unnecessary:
		 * - first column is guaranteed to be aligned
		 * - columns following a NOT NULL fixed width datum have known
		 *   alignment, can skip alignment computation if that known alignment
		 *   is compatible with current column.
		 * ------
		 */
		if (alignto > 1 &&
			(known_alignment < 0 || known_alignment != TYPEALIGN(alignto, known_alignment)))
		{
			/*
			 * When accessing a varlena field, we have to "peek" to see if we
			 * are looking at a pad byte or the first byte of a 1-byte-header
			 * datum.  A zero byte must be either a pad byte, or the first
			 * byte of a correctly aligned 4-byte length word; in either case,
			 * we can align safely.  A non-zero byte must be either a 1-byte
			 * length word, or the first byte of a correctly aligned 4-byte
			 * length word; in either case, we need not align.
			 */
			if (att->attlen == -1)
			{
				LLVMValueRef v_possible_padbyte;
				LLVMValueRef v_ispad;
				LLVMValueRef v_off;

				/* don't know if short varlena or not */
				attguaranteedalign = false;

				v_off = LLVMBuildLoad(b, v_offp, "");

				v_possible_padbyte =
					l_load_gep1(b, v_tupdata_base, v_off, "padbyte");
				v_ispad =
					LLVMBuildICmp(b, LLVMIntEQ,
								  v_possible_padbyte, l_int8_const(0),
								  "ispadbyte");
				LLVMBuildCondBr(b, v_ispad,
								attalignblocks[attnum],
								attstoreblocks[attnum]);
			}
			else
			{
				LLVMBuildBr(b, attalignblocks[attnum]);
			}

			LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);

			/* translation of alignment code (cf TYPEALIGN()) */
			{
				LLVMValueRef v_off_aligned;
				LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");

				/* ((ALIGNVAL) - 1) */
				LLVMValueRef v_alignval = l_sizet_const(alignto - 1);

				/* ((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) */
				LLVMValueRef v_lh = LLVMBuildAdd(b, v_off, v_alignval, "");

				/* ~((uintptr_t) ((ALIGNVAL) - 1)) */
				LLVMValueRef v_rh = l_sizet_const(~(alignto - 1));

				v_off_aligned = LLVMBuildAnd(b, v_lh, v_rh, "aligned_offset");

				LLVMBuildStore(b, v_off_aligned, v_offp);
			}
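
			/*
			 * For reference, TYPEALIGN() in c.h is
			 *		(((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) &
			 *		 ~((uintptr_t) ((ALIGNVAL) - 1)))
			 * which the add/and above implement with ALIGNVAL a
			 * compile-time constant; e.g. TYPEALIGN(4, 5) == 8.
			 */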

			/*
			 * As alignment either was unnecessary or has been performed, we
			 * now know the current alignment. This is only safe because this
			 * value isn't used for varlena and nullable columns.
			 */
			if (known_alignment >= 0)
			{
				Assert(known_alignment != 0);
				known_alignment = TYPEALIGN(alignto, known_alignment);
			}

			LLVMBuildBr(b, attstoreblocks[attnum]);
			LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
		}
		else
		{
			LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
			LLVMBuildBr(b, attalignblocks[attnum]);
			LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
			LLVMBuildBr(b, attstoreblocks[attnum]);
		}
		LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);

		/*
		 * Store the current offset if known to be constant. That allows LLVM
		 * to generate better code. Without that LLVM can't figure out that
		 * the offset might be constant due to the jumps for previously
		 * decoded columns.
		 */
		if (attguaranteedalign)
		{
			Assert(known_alignment >= 0);
			LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
		}

		/* compute what following columns are aligned to */
		if (att->attlen < 0)
		{
			/* can't guarantee any alignment after variable length field */
			known_alignment = -1;
			attguaranteedalign = false;
		}
		else if (att->attnotnull && attguaranteedalign && known_alignment >= 0)
		{
			/*
			 * If the offset to the column was previously known, a NOT NULL &
			 * fixed-width column guarantees that alignment is just the
			 * previous alignment plus column width.
			 */
			Assert(att->attlen > 0);
			known_alignment += att->attlen;
		}
		else if (att->attnotnull && (att->attlen % alignto) == 0)
		{
			/*
			 * After a NOT NULL fixed-width column with a length that is a
			 * multiple of its alignment requirement, we know the following
			 * column is aligned to at least the current column's alignment.
			 */
			Assert(att->attlen > 0);
			known_alignment = alignto;
			Assert(known_alignment > 0);
			attguaranteedalign = false;
		}
		else
		{
			known_alignment = -1;
			attguaranteedalign = false;
		}
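
		/*
		 * Example (hypothetical column mix): after an int4 NOT NULL column
		 * at a known offset, known_alignment simply advances by 4 and the
		 * next column's offset remains a compile-time constant; once a
		 * varlena column (attlen == -1) has been deformed, nothing can be
		 * assumed about subsequent offsets, so known_alignment becomes -1
		 * and offsets are tracked purely at runtime through v_offp.
		 */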

		/* compute address to load data from */
		{
			LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");

			v_attdatap =
				LLVMBuildGEP(b, v_tupdata_base, &v_off, 1, "");
		}

		/* compute address to store value at */
		v_resultp = LLVMBuildGEP(b, v_tts_values, &l_attno, 1, "");

		/* store null-byte (false) */
		LLVMBuildStore(b, l_int8_const(0),
					   LLVMBuildGEP(b, v_tts_nulls, &l_attno, 1, ""));

		/*
		 * Store the datum. For byval datums: copy the value, extend it to
		 * Datum's width, and store it. For byref types: store a pointer to
		 * the data.
		 */
		if (att->attbyval)
		{
			LLVMValueRef v_tmp_loaddata;
			LLVMTypeRef vartypep =
			LLVMPointerType(LLVMIntType(att->attlen * 8), 0);

			v_tmp_loaddata =
				LLVMBuildPointerCast(b, v_attdatap, vartypep, "");
			v_tmp_loaddata = LLVMBuildLoad(b, v_tmp_loaddata, "attr_byval");
			v_tmp_loaddata = LLVMBuildZExt(b, v_tmp_loaddata, TypeSizeT, "");

			LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
		}
		else
		{
			LLVMValueRef v_tmp_loaddata;

			/* store pointer */
			v_tmp_loaddata =
				LLVMBuildPtrToInt(b,
								  v_attdatap,
								  TypeSizeT,
								  "attr_ptr");
			LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
		}
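
		/*
		 * This is the generated analogue of the C-level
		 *		values[attnum] = fetch_att(tp, att->attbyval, att->attlen);
		 * (cf. slot_deform_heap_tuple()): byval datums are loaded and
		 * zero-extended to Datum width, byref datums are stored as a
		 * pointer into the tuple data.
		 */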

		/* increment data pointer */
		if (att->attlen > 0)
		{
			v_incby = l_sizet_const(att->attlen);
		}
		else if (att->attlen == -1)
		{
			v_incby = LLVMBuildCall(b,
									llvm_pg_func(mod, "varsize_any"),
									&v_attdatap, 1,
									"varsize_any");
			l_callsite_ro(v_incby);
			l_callsite_alwaysinline(v_incby);
		}
		else if (att->attlen == -2)
		{
			v_incby = LLVMBuildCall(b,
									llvm_pg_func(mod, "strlen"),
									&v_attdatap, 1, "strlen");

			l_callsite_ro(v_incby);

			/* add 1 for NUL byte */
			v_incby = LLVMBuildAdd(b, v_incby, l_sizet_const(1), "");
		}
		else
		{
			Assert(false);
			v_incby = NULL;		/* silence compiler */
		}
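
		/*
		 * These three cases mirror att_addlength_pointer() from
		 * access/tupmacs.h: the fixed width, VARSIZE_ANY() for varlena
		 * columns, and strlen() + 1 for cstring columns.
		 */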

		if (attguaranteedalign)
		{
			Assert(known_alignment >= 0);
			LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
		}
		else
		{
			LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");

			v_off = LLVMBuildAdd(b, v_off, v_incby, "increment_offset");
			LLVMBuildStore(b, v_off, v_offp);
		}

		/*
		 * jump to next block, unless last possible column, or all desired
		 * (available) attributes have been fetched.
		 */
		if (attnum + 1 == natts)
		{
			/* jump out */
			LLVMBuildBr(b, b_out);
		}
		else
		{
			LLVMBuildBr(b, attcheckattnoblocks[attnum + 1]);
		}
	}

	/* build block that returns */
	LLVMPositionBuilderAtEnd(b, b_out);

	{
		LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");
		LLVMValueRef v_flags;

		LLVMBuildStore(b, l_int16_const(natts), v_nvalidp);
		v_off = LLVMBuildTrunc(b, v_off, LLVMInt32Type(), "");
		LLVMBuildStore(b, v_off, v_slotoffp);
		v_flags = LLVMBuildLoad(b, v_flagsp, "tts_flags");
		v_flags = LLVMBuildOr(b, v_flags, l_int16_const(TTS_FLAG_SLOW), "");
		LLVMBuildStore(b, v_flags, v_flagsp);
		LLVMBuildRetVoid(b);
	}
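
	/*
	 * This epilogue matches the bookkeeping at the end of
	 * slot_deform_heap_tuple(): record how many attributes are now valid,
	 * remember the offset at which deforming stopped, and set
	 * TTS_FLAG_SLOW so that later deforming doesn't assume cached
	 * attribute offsets are usable.
	 */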

	LLVMDisposeBuilder(b);

	return v_deform_fn;
}