1 /* DAT.H (c) Copyright Roger Bowler, 1999-2009 */
2 /* ESA/390 Dynamic Address Translation */
3
4 /* Interpretive Execution - (c) Copyright Jan Jaeger, 1999-2009 */
5 /* z/Architecture support - (c) Copyright Jan Jaeger, 1999-2009 */
6
7 /*-------------------------------------------------------------------*/
8 /* This module implements the DAT, ALET, and ASN translation */
9 /* functions of the ESA/390 architecture, described in the manual */
10 /* SA22-7201-04 ESA/390 Principles of Operation. The numbers in */
11 /* square brackets in the comments refer to sections in the manual. */
12 /*-------------------------------------------------------------------*/
13
14 /*-------------------------------------------------------------------*/
15 /* Additional credits: */
16 /* S/370 DAT support by Jay Maynard (as described in */
17 /* GA22-7000 System/370 Principles of Operation) */
18 /* Clear remainder of ASTE when ASF=0 - Jan Jaeger */
19 /* S/370 DAT support when running under SIE - Jan Jaeger */
20 /* ESAME DAT support by Roger Bowler (SA22-7832) */
21 /* ESAME ASN authorization and ALET translation - Roger Bowler */
22 /*-------------------------------------------------------------------*/
23
24 #if !defined(OPTION_NO_INLINE_DAT) || defined(_DAT_C)
25 #if defined(FEATURE_DUAL_ADDRESS_SPACE)
26 /*-------------------------------------------------------------------*/
27 /* Translate ASN to produce address-space control parameters */
28 /* */
29 /* Input: */
30 /* asn Address space number to be translated */
31 /* regs Pointer to the CPU register context */
32 /* asteo Pointer to a word to receive real address of ASTE */
33 /* aste Pointer to 16-word area to receive a copy of the */
34 /* ASN second table entry associated with the ASN */
35 /* */
36 /* Output: */
37 /* If successful, the ASTE corresponding to the ASN value will */
38 /* be stored into the 16-word area pointed to by aste, and the */
39 /* return value is zero. Either 4 or 16 words will be stored */
40 /* depending on the value of the ASF control bit (CR0 bit 15). */
41 /* The real address of the ASTE will be stored into the word */
42 /* pointed to by asteo. */
43 /* */
44 /* If unsuccessful, the return value is a non-zero exception */
45 /* code indicating AFX-translation or ASX-translation error */
46 /* (this is to allow the LASP instruction to handle these */
47 /* exceptions by setting the condition code). */
48 /* */
49 /* A program check may be generated for addressing and ASN */
50 /* translation specification exceptions, in which case the */
51 /* function does not return. */
52 /*-------------------------------------------------------------------*/
53 _DAT_C_STATIC U16 ARCH_DEP(translate_asn) (U16 asn, REGS *regs,
54 U32 *asteo, U32 aste[])
55 {
56 U32 afte_addr; /* Address of AFTE */
57 U32 afte; /* ASN first table entry */
58 U32 aste_addr; /* Address of ASTE */
59 BYTE *aste_main; /* ASTE mainstor address */
60 int code; /* Exception code */
61 int numwords; /* ASTE size (4 or 16 words) */
62 int i; /* Array subscript */
63
64 /* [3.9.3.1] Use the AFX to obtain the real address of the AFTE */
65 afte_addr = (regs->CR(14) & CR14_AFTO) << 12;
66 afte_addr += (asn & ASN_AFX) >> 4;
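/* Illustrative example (assuming ASN_AFX selects ASN bits 0-9,
   i.e. mask X'FFC0'): for ASN X'1234' the AFX is X'48', and the
   offset added here is (X'1234' & X'FFC0') >> 4 = X'120',
   selecting the 4-byte AFTE with index X'48' in the 4K-aligned
   ASN first table designated by CR14 */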
67
68 /* Addressing exception if AFTE is outside main storage */
69 if (afte_addr > regs->mainlim)
70 goto asn_addr_excp;
71
72 /* Load the AFTE from main storage. All four bytes must be
73 fetched concurrently as observed by other CPUs */
74 afte_addr = APPLY_PREFIXING (afte_addr, regs->PX);
75 afte = ARCH_DEP(fetch_fullword_absolute) (afte_addr, regs);
76
77 /* AFX translation exception if AFTE invalid bit is set */
78 if (afte & AFTE_INVALID)
79 goto asn_afx_tran_excp;
80
81 #if !defined(FEATURE_ESAME)
82 /* ASN translation specification exception if reserved bits set */
83 if (!ASF_ENABLED(regs)) {
84 if (afte & AFTE_RESV_0)
85 goto asn_asn_tran_spec_excp;
86 } else {
87 if (afte & AFTE_RESV_1)
88 goto asn_asn_tran_spec_excp;
89 }
90 #endif /*!defined(FEATURE_ESAME)*/
91
92 /* [3.9.3.2] Use AFTE and ASX to obtain real address of ASTE */
93 if (!ASF_ENABLED(regs)) {
94 aste_addr = afte & AFTE_ASTO_0;
95 aste_addr += (asn & ASN_ASX) << 4;
96 numwords = 4;
97 } else {
98 aste_addr = afte & AFTE_ASTO_1;
99 aste_addr += (asn & ASN_ASX) << 6;
100 numwords = 16;
101 }
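/* Note: the ASX is the low-order 6 bits of the ASN (assuming
   ASN_ASX is mask X'003F'), so it indexes 16-byte ASTEs when
   ASF=0 (shift by 4) and 64-byte ASTEs when ASF=1 (shift by 6),
   matching the 4- or 16-word copy returned to the caller */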
102
103 /* Ignore carry into bit position 0 of ASTO */
104 aste_addr &= 0x7FFFFFFF;
105
106 /* Addressing exception if ASTE is outside main storage */
107 if (aste_addr > regs->mainlim)
108 goto asn_addr_excp;
109
110 /* Return the real address of the ASTE */
111 *asteo = aste_addr;
112
113 /* Fetch the 16- or 64-byte ASN second table entry from real
114 storage. Each fullword of the ASTE must be fetched
115 concurrently as observed by other CPUs */
116 aste_addr = APPLY_PREFIXING (aste_addr, regs->PX);
117 aste_main = FETCH_MAIN_ABSOLUTE(aste_addr, regs, numwords * 4);
118 for (i = 0; i < numwords; i++)
119 {
120 aste[i] = fetch_fw(aste_main);
121 aste_main += 4;
122 }
123 /* Clear remaining words if fewer than 16 words were loaded */
124 while (i < 16) aste[i++] = 0;
125
126
127 /* Check the ASX invalid bit in the ASTE */
128 if (aste[0] & ASTE0_INVALID)
129 goto asn_asx_tran_excp;
130
131 #if !defined(FEATURE_ESAME)
132 /* Check the reserved bits in first two words of ASTE */
133 if ((aste[0] & ASTE0_RESV) || (aste[1] & ASTE1_RESV)
134 || ((aste[0] & ASTE0_BASE)
135 #ifdef FEATURE_SUBSPACE_GROUP
136 && !ASF_ENABLED(regs)
137 #endif /*FEATURE_SUBSPACE_GROUP*/
138 ))
139 goto asn_asn_tran_spec_excp;
140 #endif /*!defined(FEATURE_ESAME)*/
141
142 return 0;
143
144 /* Conditions which always cause program check */
145 asn_addr_excp:
146 code = PGM_ADDRESSING_EXCEPTION;
147 goto asn_prog_check;
148
149 #if !defined(FEATURE_ESAME)
150 asn_asn_tran_spec_excp:
151 code = PGM_ASN_TRANSLATION_SPECIFICATION_EXCEPTION;
152 goto asn_prog_check;
153 #endif /*!defined(FEATURE_ESAME)*/
154
155 asn_prog_check:
156 regs->program_interrupt (regs, code);
157
158 /* Conditions which the caller may or may not program check */
159 asn_afx_tran_excp:
160 regs->TEA = asn;
161 code = PGM_AFX_TRANSLATION_EXCEPTION;
162 return code;
163
164 asn_asx_tran_excp:
165 regs->TEA = asn;
166 code = PGM_ASX_TRANSLATION_EXCEPTION;
167 return code;
168
169 } /* end function translate_asn */
170 #endif /*defined(FEATURE_DUAL_ADDRESS_SPACE)*/
171
172
173 #if defined(FEATURE_DUAL_ADDRESS_SPACE)
174 /*-------------------------------------------------------------------*/
175 /* Perform ASN authorization process */
176 /* */
177 /* Input: */
178 /* ax Authorization index */
179 /* aste Pointer to 16-word area containing a copy of the */
180 /* ASN second table entry associated with the ASN */
181 /* atemask Specifies which authority bit to test in the ATE: */
182 /* ATE_PRIMARY (for PT instruction) */
183 /* ATE_SECONDARY (for PR, SSAR, and LASP instructions, */
184 /* and all access register translations) */
185 /* regs Pointer to the CPU register context */
186 /* */
187 /* Operation: */
188 /* The AX is used to select an entry in the authority table */
189 /* pointed to by the ASTE, and an authorization bit in the ATE */
190 /* is tested. For ATE_PRIMARY (X'80'), the P bit is tested. */
191 /* For ATE_SECONDARY (X'40'), the S bit is tested. */
192 /* Authorization is successful if the ATE falls within the */
193 /* authority table limit and the tested bit value is 1. */
194 /* */
195 /* Output: */
196 /* If authorization is successful, the return value is zero. */
197 /* If authorization is unsuccessful, the return value is 1. */
198 /* */
199 /* A program check may be generated for addressing exception */
200 /* if the authority table entry address is invalid, and in */
201 /* this case the function does not return. */
202 /*-------------------------------------------------------------------*/
203 _DAT_C_STATIC int ARCH_DEP(authorize_asn) (U16 ax, U32 aste[],
204 int atemask, REGS *regs)
205 {
206 RADR ato; /* Authority table origin */
207 int atl; /* Authority table length */
208 BYTE ate; /* Authority table entry */
209
210 /* [3.10.3.1] Authority table lookup */
211
212 /* Isolate the authority table origin and length */
213 ato = aste[0] & ASTE0_ATO;
214 atl = aste[1] & ASTE1_ATL;
215
216 /* Authorization fails if AX is outside table */
217 if ((ax & 0xFFF0) > atl)
218 return 1;
219
220 /* Calculate the address of the byte in the authority
221 table which contains the 2 bit entry for this AX */
222 ato += (ax >> 2);
223
224 /* Ignore carry into bit position 0 */
225 ato &= 0x7FFFFFFF;
226
227 /* Addressing exception if ATE is outside main storage */
228 if (ato > regs->mainlim)
229 goto auth_addr_excp;
230
231 /* Load the byte containing the authority table entry
232 and shift the entry into the leftmost 2 bits */
233 ato = APPLY_PREFIXING (ato, regs->PX);
234
235 SIE_TRANSLATE(&ato, ACCTYPE_SIE, regs);
236
237 ate = regs->mainstor[ato];
238 ate <<= ((ax & 0x03)*2);
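/* Illustrative example: each byte of the authority table holds
   four 2-bit entries, so for AX=5 the entry is in byte ATO+1
   (5 >> 2), and the shift above (by (5 & 3)*2 = 2 bits) moves
   entry 5's P and S bits into bit positions 0-1 of ate, where
   atemask X'80' or X'40' can test them */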
239
240 /* Set the main storage reference bit */
241 STORAGE_KEY(ato, regs) |= STORKEY_REF;
242
243 /* Authorization fails if the specified bit (either X'80' or
244 X'40' of the 2 bit authority table entry) is zero */
245 if ((ate & atemask) == 0)
246 return 1;
247
248 /* Exit with successful return code */
249 return 0;
250
251 /* Conditions which always cause program check */
252 auth_addr_excp:
253 regs->program_interrupt (regs, PGM_ADDRESSING_EXCEPTION);
254 return 1;
255
256 } /* end function authorize_asn */
257 #endif /*defined(FEATURE_DUAL_ADDRESS_SPACE)*/
258
259
260 #if defined(FEATURE_ACCESS_REGISTERS)
261 /*-------------------------------------------------------------------*/
262 /* Translate an ALET to produce the corresponding ASTE */
263 /* */
264 /* This routine performs both ordinary ART (as used by DAT when */
265 /* operating in access register mode, and by the TAR instruction), */
266 /* and special ART (as used by the BSG instruction). The caller */
267 /* is assumed to have already eliminated the special cases of ALET */
268 /* values 0 and 1 (which have different meanings depending on */
269 /* whether the caller is DAT, TAR, or BSG). */
270 /* */
271 /* Input: */
272 /* alet ALET value */
273 /* eax The authorization index (normally obtained from */
274 /* CR8; obtained from R2 for TAR; not used for BSG) */
275 /* acctype Type of access requested: READ, WRITE, instfetch, */
276 /* TAR, LRA, TPROT, or BSG */
277 /* regs Pointer to the CPU register context */
278 /* asteo Pointer to word to receive ASTE origin address */
279 /* aste Pointer to 16-word area to receive a copy of the */
280 /* ASN second table entry associated with the ALET */
281 /* */
282 /* Output: */
283 /* If successful, the ASTE is copied into the 16-word area, */
284 /* the real address of the ASTE is stored into the word */
285 /* pointed to by asteo, and the return value is zero; */
286 /* regs->dat.protect is set to 2 if the fetch-only bit */
287 /* in the ALE is set, otherwise it is set to zero. */
288 /* */
289 /* If unsuccessful, the return value is a non-zero exception */
290 /* code in the range X'0028' through X'002D' (this is to allow */
291 /* the TAR, LRA, and TPROT instructions to handle these */
292 /* exceptions by setting the condition code). */
293 /* regs->dat.xcode is also set to the exception code. */
294 /* */
295 /* A program check may be generated for addressing and ASN */
296 /* translation specification exceptions, in which case the */
297 /* function does not return. */
298 /*-------------------------------------------------------------------*/
299 _DAT_C_STATIC U16 ARCH_DEP(translate_alet) (U32 alet, U16 eax,
300 int acctype, REGS *regs, U32 *asteo, U32 aste[])
301 {
302 U32 cb; /* DUCT or PASTE address */
303 U32 ald; /* Access-list designation */
304 U32 alo; /* Access-list origin */
305 U32 all; /* Access-list length */
306 U32 ale[4]; /* Access-list entry */
307 U32 aste_addr; /* Real address of ASTE */
308 U32 abs; /* Absolute address */
309 BYTE *mn; /* Mainstor address */
310 int i; /* Array subscript */
311
312 regs->dat.protect = 0;
313
314 /* [5.8.4.3] Check the reserved bits in the ALET */
315 if ( alet & ALET_RESV )
316 goto alet_spec_excp;
317
318 /* [5.8.4.4] Obtain the effective access-list designation */
319
320 /* Obtain the real address of the control block containing
321 the effective access-list designation. This is either
322 the Primary ASTE or the DUCT */
323 cb = (alet & ALET_PRI_LIST) ?
324 regs->CR(5) & CR5_PASTEO :
325 regs->CR(2) & CR2_DUCTO;
326
327 /* Addressing exception if outside main storage */
328 if (cb > regs->mainlim)
329 goto alet_addr_excp;
330
331 /* Load the effective access-list designation (ALD) from
332 offset 16 in the control block. All four bytes must be
333 fetched concurrently as observed by other CPUs. Note
334 that the DUCT and the PASTE cannot cross a page boundary */
335 cb = APPLY_PREFIXING (cb, regs->PX);
336 ald = ARCH_DEP(fetch_fullword_absolute) (cb+16, regs);
337
338 /* [5.8.4.5] Access-list lookup */
339
340 /* Isolate the access-list origin and access-list length */
341 alo = ald & ALD_ALO;
342 all = ald & ALD_ALL;
343
344 /* Check that the ALEN does not exceed the ALL */
345 if (((alet & ALET_ALEN) >> ALD_ALL_SHIFT) > all)
346 goto alen_tran_excp;
347
348 /* Add the ALEN x 16 to the access list origin */
349 alo += (alet & ALET_ALEN) << 4;
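/* Illustrative example (assuming ALET_ALEN selects the
   low-order 16 bits of the ALET): each ALE is 16 bytes, so an
   ALEN of 3 adds X'30' to the access-list origin */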
350
351 /* Addressing exception if outside main storage */
352 if (alo > regs->mainlim)
353 goto alet_addr_excp;
354
355 /* Fetch the 16-byte access list entry from absolute storage.
356 Each fullword of the ALE must be fetched concurrently as
357 observed by other CPUs */
358 alo = APPLY_PREFIXING (alo, regs->PX);
359 mn = FETCH_MAIN_ABSOLUTE(alo, regs, 16);
360 for (i = 0; i < 4; i++)
361 {
362 ale[i] = fetch_fw (mn);
363 mn += 4;
364 }
365
366 /* Check the ALEN invalid bit in the ALE */
367 if (ale[0] & ALE0_INVALID)
368 goto alen_tran_excp;
369
370 /* For ordinary ART (but not for special ART),
371 compare the ALE sequence number with the ALET */
372 if (!(acctype & ACC_SPECIAL_ART)
373 && (ale[0] & ALE0_ALESN) != (alet & ALET_ALESN))
374 goto ale_seq_excp;
375
376 /* [5.8.4.6] Locate the ASN-second-table entry */
377 aste_addr = ale[2] & ALE2_ASTE;
378
379 /* Addressing exception if ASTE is outside main storage */
380 abs = APPLY_PREFIXING (aste_addr, regs->PX);
381 if (abs > regs->mainlim)
382 goto alet_addr_excp;
383 mn = FETCH_MAIN_ABSOLUTE(abs, regs, 64);
384
385 /* Fetch the 64-byte ASN second table entry from real storage.
386 Each fullword of the ASTE must be fetched concurrently as
387 observed by other CPUs. ASTE cannot cross a page boundary */
388 for (i = 0; i < 16; i++)
389 {
390 aste[i] = fetch_fw(mn);
391 mn += 4;
392 }
393
394 /* Check the ASX invalid bit in the ASTE */
395 if (aste[0] & ASTE0_INVALID)
396 goto aste_vald_excp;
397
398 /* Compare the ASTE sequence number with the ALE */
399 if ((aste[5] & ASTE5_ASTESN) != (ale[3] & ALE3_ASTESN))
400 goto aste_seq_excp;
401
402 /* [5.8.4.7] For ordinary ART (but not for special ART),
403 authorize the use of the access-list entry */
404 if (!(acctype & ACC_SPECIAL_ART))
405 {
406 /* If ALE private bit is zero, or the ALE AX equals the
407 EAX, then authorization succeeds. Otherwise perform
408 the extended authorization process. */
409 if ((ale[0] & ALE0_PRIVATE)
410 && (ale[0] & ALE0_ALEAX) != eax)
411 {
412 #if !defined(FEATURE_ESAME)
413 /* Check the reserved bits in first two words of ASTE */
414 if ((aste[0] & ASTE0_RESV) || (aste[1] & ASTE1_RESV)
415 || ((aste[0] & ASTE0_BASE)
416 #ifdef FEATURE_SUBSPACE_GROUP
417 && !ASF_ENABLED(regs)
418 #endif /*FEATURE_SUBSPACE_GROUP*/
419 ))
420 goto alet_asn_tran_spec_excp;
421 #endif /*!defined(FEATURE_ESAME)*/
422
423 /* Perform extended authorization */
424 if (ARCH_DEP(authorize_asn)(eax, aste, ATE_SECONDARY, regs) != 0)
425 goto ext_auth_excp;
426 }
427
428 } /* end if(!ACCTYPE_BSG) */
429
430 /* [5.8.4.8] Check for access-list controlled protection */
431 if (ale[0] & ALE0_FETCHONLY)
432 regs->dat.protect = 2;
433
434 /* Return the ASTE origin address */
435 *asteo = aste_addr;
436 return 0;
437
438 /* Conditions which always cause program check, except
439 when performing translation for the control panel */
440 alet_addr_excp:
441 regs->dat.xcode = PGM_ADDRESSING_EXCEPTION;
442 goto alet_prog_check;
443
444 #if !defined(FEATURE_ESAME)
445 alet_asn_tran_spec_excp:
446 regs->dat.xcode = PGM_ASN_TRANSLATION_SPECIFICATION_EXCEPTION;
447 goto alet_prog_check;
448 #endif /*!defined(FEATURE_ESAME)*/
449
450 alet_prog_check:
451 regs->program_interrupt (regs, regs->dat.xcode);
452
453 /* Conditions which the caller may or may not program check */
454 alet_spec_excp:
455 regs->dat.xcode = PGM_ALET_SPECIFICATION_EXCEPTION;
456 return regs->dat.xcode;
457
458 alen_tran_excp:
459 regs->dat.xcode = PGM_ALEN_TRANSLATION_EXCEPTION;
460 return regs->dat.xcode;
461
462 ale_seq_excp:
463 regs->dat.xcode = PGM_ALE_SEQUENCE_EXCEPTION;
464 return regs->dat.xcode;
465
466 aste_vald_excp:
467 regs->dat.xcode = PGM_ASTE_VALIDITY_EXCEPTION;
468 return regs->dat.xcode;
469
470 aste_seq_excp:
471 regs->dat.xcode = PGM_ASTE_SEQUENCE_EXCEPTION;
472 return regs->dat.xcode;
473
474 ext_auth_excp:
475 regs->dat.xcode = PGM_EXTENDED_AUTHORITY_EXCEPTION;
476 return regs->dat.xcode;
477
478 } /* end function translate_alet */
479 #endif /*defined(FEATURE_ACCESS_REGISTERS)*/
480
481
482 #if defined(FEATURE_ACCESS_REGISTERS)
483 /*-------------------------------------------------------------------*/
484 /* Purge the ART lookaside buffer */
485 /*-------------------------------------------------------------------*/
486 _DAT_C_STATIC void ARCH_DEP(purge_alb) (REGS *regs)
487 {
488 int i;
489
490 for(i = 1; i < 16; i++)
491 if(regs->aea_ar[i] >= CR_ALB_OFFSET && regs->aea_ar[i] != CR_ASD_REAL)
492 regs->aea_ar[i] = 0;
493
494 if(regs->host && regs->guestregs)
495 for(i = 1; i < 16; i++)
496 if(regs->guestregs->aea_ar[i] >= CR_ALB_OFFSET && regs->guestregs->aea_ar[i] != CR_ASD_REAL)
497 regs->guestregs->aea_ar[i] = 0;
498
499 } /* end function purge_alb */
500
501 /*-------------------------------------------------------------------*/
502 /* Purge the ART lookaside buffer for all CPUs */
503 /*-------------------------------------------------------------------*/
504 _DAT_C_STATIC void ARCH_DEP(purge_alb_all) ()
505 {
506 int i;
507
508 for (i = 0; i < MAX_CPU; i++)
509 if (IS_CPU_ONLINE(i)
510 && (sysblk.regs[i]->cpubit & sysblk.started_mask))
511 ARCH_DEP(purge_alb) (sysblk.regs[i]);
512
513 } /* end function purge_alb_all */
514 #endif /*defined(FEATURE_ACCESS_REGISTERS)*/
515
516
517 /*-------------------------------------------------------------------*/
518 /* Determine effective ASCE or STD */
519 /* */
520 /* This routine returns either an address-space control element */
521 /* (for ESAME) or a segment table descriptor (for S/370 and ESA/390) */
522 /* loaded from control register 1, 7, or 13, or computed from the */
523 /* contents of an address register, together with an indication of */
524 /* the addressing mode (home, primary, secondary, or AR mode) */
525 /* which was used to determine the source of the ASCE or STD. */
526 /* */
527 /* Input: */
528 /* arn Access register number (0-15) to be used if the */
529 /* address-space control (PSW bits 16-17) indicates */
530 /* that ARMODE is the current translation mode. */
531 /* An access register number ORed with the special */
532 /* value USE_ARMODE forces this routine to use ARMODE */
533 /* regardless of the PSW address-space control setting. */
534 /* Access register 0 is treated as if it contained 0 */
535 /* and its actual contents are not examined. */
536 /* Alternatively the arn parameter may contain one */
537 /* of these special values (defined in hconsts.h): */
538 /* USE_PRIMARY_SPACE, USE_SECONDARY_SPACE, */
539 /* USE_HOME_SPACE, USE_REAL_ADDR to force the use of */
540 /* a specific translation mode instead of the mode */
541 /* indicated by the address-space control in the PSW. */
542 /* regs Pointer to the CPU register context */
543 /* acctype Type of access requested: READ, WRITE, INSTFETCH, */
544 /* LRA, IVSK, TPROT, STACK, PTE, LPTEA */
545 /* */
546 /* Output: */
547 /* regs->dat.asd = the selected ASCE or STD */
548 /* regs->dat.stid = TEA_ST_PRIMARY, TEA_ST_SECNDRY, */
549 /* TEA_ST_HOME, or TEA_ST_ARMODE indicates which */
550 /* address space was used to select the ASCE or STD. */
551 /* regs->dat.protect = 2 if in AR mode and access-list */
552 /* controlled protection is indicated by the ALE */
553 /* fetch-only bit; otherwise it remains unchanged. */
554 /* */
555 /* If an ALET translation error occurs, the return value */
556 /* is the exception code; otherwise the return value is zero, */
557 /* regs->dat.asd field contains the ASCE or STD, and */
558 /* regs->dat.stid is set to TEA_ST_PRIMARY, TEA_ST_SECNDRY, */
559 /* TEA_ST_HOME, or TEA_ST_ARMODE. */
560 /*-------------------------------------------------------------------*/
561 _DAT_C_STATIC U16 ARCH_DEP(load_address_space_designator) (int arn,
562 REGS *regs, int acctype)
563 {
564 #if defined(FEATURE_ACCESS_REGISTERS)
565 U32 alet; /* Access list entry token */
566 U32 asteo; /* Real address of ASTE */
567 U32 aste[16]; /* ASN second table entry */
568 U16 eax; /* Authorization index */
569 #else
570 UNREFERENCED(acctype);
571 #endif /*defined(FEATURE_ACCESS_REGISTERS)*/
572
573 switch(arn) {
574
575 case USE_PRIMARY_SPACE:
576 regs->dat.stid = TEA_ST_PRIMARY;
577 regs->dat.asd = regs->CR(1);
578 break;
579
580 case USE_SECONDARY_SPACE:
581 regs->dat.stid = TEA_ST_SECNDRY;
582 regs->dat.asd = regs->CR(7);
583 break;
584
585 case USE_HOME_SPACE:
586 regs->dat.stid = TEA_ST_HOME;
587 regs->dat.asd = regs->CR(13);
588 break;
589
590 case USE_REAL_ADDR:
591 regs->dat.stid = 0;
592 regs->dat.asd = TLB_REAL_ASD;
593 break;
594
595 case USE_INST_SPACE:
596 switch(regs->aea_ar[USE_INST_SPACE]) {
597
598 case 1:
599 regs->dat.stid = TEA_ST_PRIMARY;
600 break;
601 #if defined(FEATURE_LINKAGE_STACK)
602 case 13:
603 regs->dat.stid = TEA_ST_HOME;
604 break;
605 #endif
606 default:
607 regs->dat.stid = 0;
608 } /* end switch(regs->aea_ar[USE_INST_SPACE]) */
609
610 regs->dat.asd = regs->CR(regs->aea_ar[USE_INST_SPACE]);
611 break;
612
613 default:
614
615 #if defined(FEATURE_ACCESS_REGISTERS)
616 if (ACCESS_REGISTER_MODE(&regs->psw)
617 || (SIE_ACTIVE(regs) && MULTIPLE_CONTROLLED_DATA_SPACE(regs->guestregs))
618 || (arn & USE_ARMODE)
619 )
620 {
621 /* Remove flags giving access register number 0-15 */
622 arn &= 0xF;
623
624 /* [5.8.4.1] Select the access-list-entry token */
625 alet = (arn == 0) ? 0 :
626 /* Guest ALET if XC guest in AR mode */
627 (SIE_ACTIVE(regs) && MULTIPLE_CONTROLLED_DATA_SPACE(regs->guestregs))
628 ? regs->guestregs->AR(arn) :
629 /* If SIE host but not XC guest in AR mode then alet is 0 */
630 SIE_ACTIVE(regs) ? 0 :
631 /* Otherwise alet is in the access register */
632 regs->AR(arn);
633
634 /* Use the ALET to determine the segment table origin */
635 switch (alet) {
636
637 case ALET_PRIMARY:
638 /* [5.8.4.2] Obtain primary segment table designation */
639 regs->dat.stid = TEA_ST_PRIMARY;
640 regs->dat.asd = regs->CR(1);
641 break;
642
643 case ALET_SECONDARY:
644 /* [5.8.4.2] Obtain secondary segment table designation */
645 regs->dat.stid = TEA_ST_SECNDRY;
646 regs->dat.asd = regs->CR(7);
647 break;
648
649 default:
650 /* ALB Lookup */
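/* Explanatory note, based on the ALB update code below:
   aea_ar[arn] at or above CR_ALB_OFFSET means a previous ART
   result for this access register is cached in
   CR(CR_ALB_OFFSET+arn), so ALET translation can be bypassed */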
651 if(regs->aea_ar[arn] >= CR_ALB_OFFSET && regs->aea_ar[arn] != CR_ASD_REAL)
652 {
653 regs->dat.asd = regs->CR(regs->aea_ar[arn]);
654 regs->dat.protect = regs->aea_aleprot[arn];
655 regs->dat.stid = TEA_ST_ARMODE;
656 }
657 else
658 {
659 /* Extract the extended AX from CR8 bits 0-15 (32-47) */
660 eax = regs->CR_LHH(8);
661
662 /* [5.8.4.3] Perform ALET translation to obtain ASTE */
663 if (ARCH_DEP(translate_alet) (alet, eax, acctype,
664 regs, &asteo, aste))
665 /* Exit if ALET translation error */
666 return regs->dat.xcode;
667
668 /* [5.8.4.9] Obtain the STD or ASCE from the ASTE */
669 regs->dat.asd = ASTE_AS_DESIGNATOR(aste);
670 regs->dat.stid = TEA_ST_ARMODE;
671 if(regs->dat.protect & 2)
672 {
673 #if defined(FEATURE_ESAME)
674 regs->dat.asd ^= ASCE_RESV;
675 regs->dat.asd |= ASCE_P;
676 #else
677 regs->dat.asd ^= STD_RESV;
678 regs->dat.asd |= STD_PRIVATE;
679 #endif
680 }
681
682 /* Update ALB */
683 regs->CR(CR_ALB_OFFSET + arn) = regs->dat.asd;
684 regs->aea_ar[arn] = CR_ALB_OFFSET + arn;
685 regs->aea_common[CR_ALB_OFFSET + arn] = (regs->dat.asd & ASD_PRIVATE) == 0;
686 regs->aea_aleprot[arn] = regs->dat.protect & 2;
687
688 }
689
690 } /* end switch(alet) */
691
692 break;
693
694 } /* end if(ACCESS_REGISTER_MODE) */
695 #endif /*defined(FEATURE_ACCESS_REGISTERS)*/
696
697 #if defined(FEATURE_DUAL_ADDRESS_SPACE)
698 if (SECONDARY_SPACE_MODE(&regs->psw))
699 {
700 regs->dat.stid = TEA_ST_SECNDRY;
701 regs->dat.asd = regs->CR(7);
702 break;
703 }
704 #endif /* defined(FEATURE_DUAL_ADDRESS_SPACE) */
705
706 #if defined(FEATURE_LINKAGE_STACK)
707 if (HOME_SPACE_MODE(&regs->psw))
708 {
709 regs->dat.stid = TEA_ST_HOME;
710 regs->dat.asd = regs->CR(13);
711 break;
712 }
713 #endif /* defined(FEATURE_LINKAGE_STACK) */
714
715 /* Primary space mode */
716 regs->dat.stid = TEA_ST_PRIMARY;
717 regs->dat.asd = regs->CR(1);
718 break;
719
720 } /* switch(arn) */
721
722 return 0;
723
724 } /* end function load_address_space_designator */
725
726
727 /*-------------------------------------------------------------------*/
728 /* Translate a virtual address to a real address */
729 /* */
730 /* Input: */
731 /* vaddr virtual address to be translated */
732 /* arn Access register number or special value (see */
733 /* load_address_space_designator function for a */
734 /* complete description of this parameter) */
735 /* regs Pointer to the CPU register context */
736 /* acctype Type of access requested: READ, WRITE, INSTFETCH, */
737 /* LRA, IVSK, TPROT, STACK, PTE, LPTEA */
738 /* */
739 /* Output: */
740 /* The return value is set to facilitate the setting of the */
741 /* condition code by the LRA instruction: */
742 /* 0 = Translation successful; real address field contains */
743 /* the real address corresponding to the virtual address */
744 /* supplied by the caller; exception code set to zero. */
745 /* 1 = Segment table entry invalid; real address field */
746 /* contains real address of segment table entry; */
747 /* exception code is set to X'0010'. */
748 /* 2 = Page table entry invalid; real address field contains */
749 /* real address of page table entry; exception code */
750 /* is set to X'0011'. */
751 /* 3 = Segment or page table length exceeded; real address */
752 /* field contains the real address of the entry that */
753 /* would have been fetched if length violation had not */
754 /* occurred; exception code is set to X'0010' or X'0011'. */
755 /* 4 = ALET translation error: real address field is not */
756 /* set; exception code is set to X'0028' through X'002D'. */
757 /* ASCE-type or region-translation error: real address */
758 /* is not set; exception code is X'0038' through X'003B'. */
759 /* The LRA instruction converts this to condition code 3. */
760 /* 5 = For ACCTYPE_EMC (Enhanced MC access only): */
761 /* A translation specification exception occurred */
762 /* */
763 /* For ACCTYPE_LPTEA, the return value is set to facilitate */
764 /* setting the condition code by the LPTEA instruction: */
765 /* 0 = Page table entry found, and page protection bit in the */
766 /* segment table entry is zero; the real address field */
767 /* contains the real address of the page table entry; */
768 /* exception code is set to zero. */
769 /* 1 = Page table entry found, and page protection bit in the */
770 /* segment table entry is one; the real address field */
771 /* contains the real address of the page table entry; */
772 /* exception code is set to zero. */
773 /* 2 = Region table or segment table entry invalid bit is set; */
774 /* the real address field contains the real address of the */
775 /* region table entry or segment table entry, with the */
776 /* entry type in the low-order two bits of the address. */
777 /* 3 = Region table or segment table length exceeded; real */
778 /* address field is not set; exception code is set to */
779 /* X'0010' or X'0039' through X'003B'. */
780 /* ALET translation error: real address field is not */
781 /* set; exception code is set to X'0028' through X'002D'. */
782 /* ASCE-type error: real address is not set; exception */
783 /* code is X'0038'. */
784 /* */
785 /* regs->dat.raddr is set to the real address if translation */
786 /* was successful; otherwise it may contain the address of */
787 /* a page or segment table entry as described above. */
788 /* For ACCTYPE_PTE or ACCTYPE_LPTEA it contains the address of */
789 /* the page table entry if translation was successful. */
790 /* */
791 /* regs->dat.xcode is set to the exception code if translation */
792 /* was unsuccessful; otherwise it is set to zero. */
793 /* */
794 /* regs->dat.private is set to 1 if translation was */
795 /* successful and the STD indicates a private address space; */
796 /* otherwise it is set to zero. */
797 /* */
798 /* regs->dat.protect is set to 1 if translation was */
799 /* successful and page protection, segment protection, or */
800 /* segment controlled page protection is in effect; it is */
801 /* set to 2 if translation was successful and ALE controlled */
802 /* protection (but not page protection) is in effect; */
803 /* otherwise it is set to zero. */
804 /* */
805 /* regs->dat.stid is set to one of the following */
806 /* values TEA_ST_PRIMARY, TEA_ST_SECNDRY, TEA_ST_HOME, or */
807 /* TEA_ST_ARMODE if the translation was successful. This */
808 /* indication is used to set bits 30-31 of the translation */
809 /* exception address in the event of a protection exception */
810 /* when the suppression on protection facility is used. */
811 /* */
812 /* A program check may be generated for addressing and */
813 /* translation specification exceptions, in which case the */
814 /* function does not return. */
815 /*-------------------------------------------------------------------*/
816 _DAT_C_STATIC int ARCH_DEP(translate_addr) (VADR vaddr, int arn,
817 REGS *regs, int acctype)
818 {
819 RADR sto = 0; /* Segment table origin */
820 RADR pto = 0; /* Page table origin */
821 int cc; /* Condition code */
822 int tlbix = TLBIX(vaddr); /* TLB entry index */
823
824 #if !defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)
825 /*-----------------------------------*/
826 /* S/370 Dynamic Address Translation */
827 /*-----------------------------------*/
828 U32 stl; /* Segment table length */
829 RADR ste; /* Segment table entry */
830 U16 pte; /* Page table entry */
831 U32 ptl; /* Page table length */
832
833 regs->dat.private = regs->dat.protect = 0;
834
835 /* Load the effective segment table descriptor */
836 if (ARCH_DEP(load_address_space_designator) (arn, regs, acctype))
837 goto tran_alet_excp;
838
839 /* Check the translation format bits in CR0 */
840 if ((((regs->CR(0) & CR0_PAGE_SIZE) != CR0_PAGE_SZ_2K) &&
841 ((regs->CR(0) & CR0_PAGE_SIZE) != CR0_PAGE_SZ_4K)) ||
842 (((regs->CR(0) & CR0_SEG_SIZE) != CR0_SEG_SZ_64K) &&
843 ((regs->CR(0) & CR0_SEG_SIZE) != CR0_SEG_SZ_1M)))
844 goto tran_spec_excp;
845
846 /* Look up the address in the TLB */
847 if ( ((vaddr & TLBID_PAGEMASK) | regs->tlbID) == regs->tlb.TLB_VADDR(tlbix)
848 && (regs->tlb.common[tlbix] || regs->dat.asd == regs->tlb.TLB_ASD(tlbix))
849 && !(regs->tlb.common[tlbix] && regs->dat.private)
850 && !(acctype & ACC_NOTLB) )
851 {
852 pte = regs->tlb.TLB_PTE(tlbix);
853
854 #ifdef FEATURE_SEGMENT_PROTECTION
855 /* Set the protection indicator if segment is protected */
856 if (regs->tlb.protect[tlbix])
857 regs->dat.protect = regs->tlb.protect[tlbix];
858 #endif /*FEATURE_SEGMENT_PROTECTION*/
859 }
860 else
861 {
862 /* S/370 segment table lookup */
863
864 /* Calculate the real address of the segment table entry */
865 sto = regs->dat.asd & STD_370_STO;
866 stl = regs->dat.asd & STD_370_STL;
867 sto += ((regs->CR(0) & CR0_SEG_SIZE) == CR0_SEG_SZ_1M) ?
868 ((vaddr & 0x00F00000) >> 18) :
869 ((vaddr & 0x00FF0000) >> 14);
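/* Illustrative example: with 64K segments, vaddr X'00123456'
   has segment index X'12' (the high-order 8 bits of the 24-bit
   address); each S/370 STE is 4 bytes, so the offset added
   above is (X'00123456' & X'00FF0000') >> 14 = X'48' */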
870
871 /* Check that virtual address is within the segment table */
872 if (((regs->CR(0) & CR0_SEG_SIZE) == CR0_SEG_SZ_64K) &&
873 ((vaddr << 4) & STD_370_STL) > stl)
874 goto seg_tran_length;
875
876 /* Generate addressing exception if outside real storage */
877 if (sto > regs->mainlim)
878 goto address_excp;
879
880 /* Fetch segment table entry from real storage. All bytes
881 must be fetched concurrently as observed by other CPUs */
882 sto = APPLY_PREFIXING (sto, regs->PX);
883 ste = ARCH_DEP(fetch_fullword_absolute) (sto, regs);
884
885 /* Generate segment translation exception if segment invalid */
886 if (ste & SEGTAB_370_INVL)
887 goto seg_tran_invalid;
888
889 /* Check that all the reserved bits in the STE are zero */
890 if (ste & SEGTAB_370_RSV)
891 goto tran_spec_excp;
892
893 /* Isolate page table origin and length */
894 pto = ste & SEGTAB_370_PTO;
895 ptl = ste & SEGTAB_370_PTL;
896
897 /* S/370 page table lookup */
898
899 /* Calculate the real address of the page table entry */
900 pto += ((regs->CR(0) & CR0_SEG_SIZE) == CR0_SEG_SZ_1M) ?
901 (((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
902 ((vaddr & 0x000FF000) >> 11) :
903 ((vaddr & 0x000FF800) >> 10)) :
904 (((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
905 ((vaddr & 0x0000F000) >> 11) :
906 ((vaddr & 0x0000F800) >> 10));
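/* Illustrative example: with 1M segments and 4K pages, vaddr
   X'00123456' has page index X'23'; each S/370 PTE is a
   halfword, so the offset added above is
   (X'00123456' & X'000FF000') >> 11 = X'46' */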
907
908 /* Generate addressing exception if outside real storage */
909 if (pto > regs->mainlim)
910 goto address_excp;
911
912 /* Check that the virtual address is within the page table */
913 if ((((regs->CR(0) & CR0_SEG_SIZE) == CR0_SEG_SZ_1M) &&
914 (((vaddr & 0x000F0000) >> 16) > ptl)) ||
915 (((regs->CR(0) & CR0_SEG_SIZE) == CR0_SEG_SZ_64K) &&
916 (((vaddr & 0x0000F000) >> 12) > ptl)))
917 goto page_tran_length;
918
919 /* Fetch the page table entry from real storage. All bytes
920 must be fetched concurrently as observed by other CPUs */
921 pto = APPLY_PREFIXING (pto, regs->PX);
922 pte = ARCH_DEP(fetch_halfword_absolute) (pto, regs);
923
924 /* Generate page translation exception if page invalid */
925 if ((((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) &&
926 (pte & PAGETAB_INV_4K)) ||
927 (((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_2K) &&
928 (pte & PAGETAB_INV_2K)))
929 goto page_tran_invalid;
930
931 /* Check that all the reserved bits in the PTE are zero */
932 if (((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_2K) &&
933 (pte & PAGETAB_RSV_2K))
934 goto tran_spec_excp;
935
936 #ifdef FEATURE_SEGMENT_PROTECTION
937 /* Set the protection indicator if segment is protected */
938 if (ste & SEGTAB_370_PROT)
939 regs->dat.protect |= 1;
940 #endif /*FEATURE_SEGMENT_PROTECTION*/
941
942 /* Place the translated address in the TLB */
943 if (!(acctype & ACC_NOTLB))
944 {
945 regs->tlb.TLB_ASD(tlbix) = regs->dat.asd;
946 regs->tlb.TLB_VADDR(tlbix) = (vaddr & TLBID_PAGEMASK) | regs->tlbID;
947 regs->tlb.TLB_PTE(tlbix) = pte;
948 regs->tlb.common[tlbix] = (ste & SEGTAB_370_CMN) ? 1 : 0;
949 regs->tlb.protect[tlbix] = regs->dat.protect;
950 regs->tlb.acc[tlbix] = 0;
951 regs->tlb.main[tlbix] = NULL;
952
953 /* Set adjacent TLB entry if 4K page sizes */
954 if ((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K)
955 {
956 regs->tlb.TLB_ASD(tlbix^1) = regs->tlb.TLB_ASD(tlbix);
957 regs->tlb.TLB_VADDR(tlbix^1) = (vaddr & TLBID_PAGEMASK) | regs->tlbID;
958 regs->tlb.TLB_PTE(tlbix^1) = regs->tlb.TLB_PTE(tlbix);
959 regs->tlb.common[tlbix^1] = regs->tlb.common[tlbix];
960 regs->tlb.protect[tlbix^1] = regs->tlb.protect[tlbix];
961 regs->tlb.acc[tlbix^1] = 0;
962 regs->tlb.main[tlbix^1] = NULL;
963 }
964 }
965 } /* end if(!TLB) */
966
967 /* Combine the page frame real address with the byte
968 index of the virtual address to form the real address */
969 regs->dat.raddr = ((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
970 #if defined(FEATURE_S370E_EXTENDED_ADDRESSING)
971 (((U32)pte & PAGETAB_EA_4K) << 23) |
972 #endif
973 (((U32)pte & PAGETAB_PFRA_4K) << 8) | (vaddr & 0xFFF) :
974 (((U32)pte & PAGETAB_PFRA_2K) << 8) | (vaddr & 0x7FF);
975
976 regs->dat.rpfra = regs->dat.raddr & PAGEFRAME_PAGEMASK;
977 #endif /*!defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)*/
978
979 #if defined(FEATURE_S390_DAT)
980 /*-----------------------------------*/
981 /* S/390 Dynamic Address Translation */
982 /*-----------------------------------*/
983 U32 stl; /* Segment table length */
984 RADR ste; /* Segment table entry */
985 RADR pte; /* Page table entry */
986 U32 ptl; /* Page table length */
987
988 regs->dat.private = regs->dat.protect = 0;
989
990 /* [3.11.3.1] Load the effective segment table descriptor */
991 if (ARCH_DEP(load_address_space_designator) (arn, regs, acctype))
992 goto tran_alet_excp;
993
994 /* [3.11.3.2] Check the translation format bits in CR0 */
995 if ((regs->CR(0) & CR0_TRAN_FMT) != CR0_TRAN_ESA390)
996 goto tran_spec_excp;
997
998 /* Extract the private space bit from segment table descriptor */
999 regs->dat.private = ((regs->dat.asd & STD_PRIVATE) != 0);
1000
1001 /* [3.11.4] Look up the address in the TLB */
1002 if ( ((vaddr & TLBID_PAGEMASK) | regs->tlbID) == regs->tlb.TLB_VADDR(tlbix)
1003 && (regs->tlb.common[tlbix] || regs->dat.asd == regs->tlb.TLB_ASD(tlbix))
1004 && !(regs->tlb.common[tlbix] && regs->dat.private)
1005 && !(acctype & ACC_NOTLB) )
1006 {
1007 pte = regs->tlb.TLB_PTE(tlbix);
1008 if (regs->tlb.protect[tlbix])
1009 regs->dat.protect = regs->tlb.protect[tlbix];
1010 }
1011 else
1012 {
1013 /* [3.11.3.3] Segment table lookup */
1014
1015 /* Calculate the real address of the segment table entry */
1016 sto = regs->dat.asd & STD_STO;
1017 stl = regs->dat.asd & STD_STL;
1018 sto += (vaddr & 0x7FF00000) >> 18;
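/* Illustrative example: for vaddr X'12345678' the segment
   index (SX, address bits 1-11) is X'123'; each ESA/390 STE is
   4 bytes, so (X'12345678' & X'7FF00000') >> 18 = X'48C' is
   added here */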
1019
1020 /* Check that virtual address is within the segment table */
1021 if ((vaddr >> 24) > stl)
1022 goto seg_tran_length;
1023
1024 /* Generate addressing exception if outside real storage */
1025 if (sto > regs->mainlim)
1026 goto address_excp;
1027
1028 /* Fetch segment table entry from real storage. All bytes
1029 must be fetched concurrently as observed by other CPUs */
1030 sto = APPLY_PREFIXING (sto, regs->PX);
1031 ste = ARCH_DEP(fetch_fullword_absolute) (sto, regs);
1032
1033 /* Generate segment translation exception if segment invalid */
1034 if (ste & SEGTAB_INVALID)
1035 goto seg_tran_invalid;
1036
1037 /* Check that all the reserved bits in the STE are zero */
1038 if (ste & SEGTAB_RESV)
1039 goto tran_spec_excp;
1040
1041 /* If the segment table origin register indicates a private
1042 address space then STE must not indicate a common segment */
1043 if (regs->dat.private && (ste & (SEGTAB_COMMON)))
1044 goto tran_spec_excp;
1045
1046 /* Isolate page table origin and length */
1047 pto = ste & SEGTAB_PTO;
1048 ptl = ste & SEGTAB_PTL;
1049
1050 /* [3.11.3.4] Page table lookup */
1051
1052 /* Calculate the real address of the page table entry */
1053 pto += (vaddr & 0x000FF000) >> 10;
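/* Illustrative example: for vaddr X'12345678' the page index
   (PX, address bits 12-19) is X'45'; each ESA/390 PTE is
   4 bytes, so (X'12345678' & X'000FF000') >> 10 = X'114' is
   added here */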
1054
1055 /* Check that the virtual address is within the page table */
1056 if (((vaddr & 0x000FF000) >> 16) > ptl)
1057 goto page_tran_length;
1058
1059 /* Generate addressing exception if outside real storage */
1060 if (pto > regs->mainlim)
1061 goto address_excp;
1062
1063 /* Fetch the page table entry from real storage. All bytes
1064 must be fetched concurrently as observed by other CPUs */
1065 pto = APPLY_PREFIXING (pto, regs->PX);
1066 pte = ARCH_DEP(fetch_fullword_absolute) (pto, regs);
1067
1068 /* Generate page translation exception if page invalid */
1069 if (pte & PAGETAB_INVALID)
1070 goto page_tran_invalid;
1071
1072 /* Check that all the reserved bits in the PTE are zero */
1073 if (pte & PAGETAB_RESV)
1074 goto tran_spec_excp;
1075
1076 /* Set the protection indicator if page protection is active */
1077 if (pte & PAGETAB_PROT)
1078 regs->dat.protect |= 1;
1079
1080 /* [3.11.4.2] Place the translated address in the TLB */
1081 if (!(acctype & ACC_NOTLB))
1082 {
1083 regs->tlb.TLB_ASD(tlbix) = regs->dat.asd;
1084 regs->tlb.TLB_VADDR(tlbix) = (vaddr & TLBID_PAGEMASK) | regs->tlbID;
1085 regs->tlb.TLB_PTE(tlbix) = pte;
1086 regs->tlb.common[tlbix] = (ste & SEGTAB_COMMON) ? 1 : 0;
1087 regs->tlb.acc[tlbix] = 0;
1088 regs->tlb.protect[tlbix] = regs->dat.protect;
1089 regs->tlb.main[tlbix] = NULL;
1090 }
1091 } /* end if(!TLB) */
1092
1093 if(!(acctype & ACC_PTE))
1094 {
1095 /* [3.11.3.5] Combine the page frame real address with the byte
1096 index of the virtual address to form the real address */
1097 regs->dat.raddr = (pte & PAGETAB_PFRA) | (vaddr & 0xFFF);
1098 regs->dat.rpfra = (pte & PAGETAB_PFRA);
1099 }
1100 else
1101 /* In the case of lock page, return the address of the
1102 pagetable entry */
1103 regs->dat.raddr = pto;
1104
1105 #endif /*defined(FEATURE_S390_DAT)*/
1106
1107 #if defined(FEATURE_ESAME)
1108 /*-----------------------------------*/
1109 /* ESAME Dynamic Address Translation */
1110 /*-----------------------------------*/
1111 RADR rte; /* Region table entry */
1112 #define rto sto /* Region/seg table origin */
1113 RADR ste = 0; /* Segment table entry */
1114 RADR pte = 0; /* Page table entry */
1115 BYTE tt; /* Table type */
1116 BYTE tl; /* Table length */
1117 BYTE tf; /* Table offset */
1118 U16 rfx, rsx, rtx; /* Region first/second/third
1119 index + 3 low-order zeros */
1120 U16 sx, px; /* Segment and page index,
1121 + 3 low-order zero bits */
1122
1123 regs->dat.private = regs->dat.protect = 0;
1124
1125 /* Load the address space control element */
1126 if (ARCH_DEP(load_address_space_designator) (arn, regs, acctype))
1127 goto tran_alet_excp;
1128
1129 /* Extract the private space bit from the ASCE */
1130 regs->dat.private = ((regs->dat.asd & (ASCE_P|ASCE_R)) != 0);
1131
1132 // logmsg("asce=%16.16" I64_FMT "X\n",regs->dat.asd);
1133
1134 /* [3.11.4] Look up the address in the TLB */
1135 if ( ((vaddr & TLBID_PAGEMASK) | regs->tlbID) == regs->tlb.TLB_VADDR(tlbix)
1136 && (regs->tlb.common[tlbix] || regs->dat.asd == regs->tlb.TLB_ASD(tlbix))
1137 && !(regs->tlb.common[tlbix] && regs->dat.private)
1138 && !(acctype & ACC_NOTLB) )
1139 {
1140 pte = regs->tlb.TLB_PTE(tlbix);
1141 if (regs->tlb.protect[tlbix])
1142 regs->dat.protect = regs->tlb.protect[tlbix];
1143 }
1144 else
1145 {
1146 /* If ASCE indicates a real-space then real addr = virtual addr */
1147 if (regs->dat.asd & ASCE_R)
1148 {
1149 // logmsg("asce type = real\n");
1150
1151 /* Translation specification exception if LKPG for a real-space */
1152 if(acctype & ACC_PTE)
1153 goto tran_spec_excp;
1154
1155 /* Special operation exception if LPTEA for a real-space */
1156 if(acctype & ACC_LPTEA)
1157 goto spec_oper_excp;
1158
1159 /* Construct a fake page table entry for real = virtual */
1160 pte = vaddr & 0xFFFFFFFFFFFFF000ULL;
1161 }
1162 else
1163 {
1164 /* Extract the table origin, type, and length from the ASCE,
1165 and set the table offset to zero */
1166 rto = regs->dat.asd & ASCE_TO;
1167 tf = 0;
1168 tt = regs->dat.asd & ASCE_DT;
1169 tl = regs->dat.asd & ASCE_TL;
1170
1171 /* Extract the 11-bit region first index, region second index,
1172 and region third index from the virtual address, and shift
1173 each index into bits 2-12 of a 16-bit integer, ready for
1174 addition to the appropriate region table origin */
1175 rfx = (vaddr >> 50) & 0x3FF8;
1176 rsx = (vaddr >> 39) & 0x3FF8;
1177 rtx = (vaddr >> 28) & 0x3FF8;
1178
1179 /* Extract the 11-bit segment index from the virtual address,
1180 and shift it into bits 2-12 of a 16-bit integer, ready
1181 for addition to the segment table origin */
1182 sx = (vaddr >> 17) & 0x3FF8;
1183
1184 /* Extract the 8-bit page index from the virtual address,
1185 and shift it into bits 2-12 of a 16-bit integer, ready
1186 for addition to the page table origin */
1187 px = (vaddr >> 9) & 0x07F8;
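/* Illustrative example: region, segment, and page table
   entries are all 8 bytes, so each index above is extracted
   already multiplied by 8. For vaddr X'0000000000100000'
   (1 MiB) the segment index is 1, giving sx = 8, while rfx,
   rsx, rtx, and px are all zero */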
1188
1189 /* ASCE-type exception if the virtual address is too large
1190 for the table type designated by the ASCE */
1191 if ((rfx != 0 && tt < TT_R1TABL)
1192 || (rsx != 0 && tt < TT_R2TABL)
1193 || (rtx != 0 && tt < TT_R3TABL))
1194 goto asce_type_excp;
1195
1196 /* Perform region translation */
1197 switch (tt) {
1198
1199 /* Perform region-first translation */
1200 case TT_R1TABL:
1201
1202 /* Region-first translation exception if table length is
1203 less than high-order 2 bits of region-first index */
1204 if (tl < (rfx >> 12))
1205 goto reg_first_excp;
1206
1207 /* Add the region-first index (with three low-order zeroes)
1208 to the region-first table origin, giving the address of
1209 the region-first table entry */
1210 rto += rfx;
1211
1212 /* Addressing exception if outside main storage */
1213 if (rto > regs->mainlim)
1214 goto address_excp;
1215
1216 /* Fetch region-first table entry from absolute storage.
1217 All bytes must be fetched concurrently as observed by
1218 other CPUs */
1219 rte = ARCH_DEP(fetch_doubleword_absolute) (rto, regs);
1220 // logmsg("r1te:%16.16" I64_FMT "X=>%16.16" I64_FMT "X\n",rto,rte);
1221
1222 /* Region-first translation exception if the bit 58 of
1223 the region-first table entry is set (region invalid) */
1224 if (rte & REGTAB_I)
1225 goto reg_first_invalid;
1226
1227 /* Translation specification exception if bits 60-61 of
1228 the region-first table entry do not indicate the
1229 correct type of region table */
1230 if ((rte & REGTAB_TT) != TT_R1TABL)
1231 goto tran_spec_excp;
1232
1233 #if defined(FEATURE_ENHANCED_DAT_FACILITY)
1234 if ((regs->CR_L(0) & CR0_ED)
1235 && (rte & REGTAB_P))
1236 regs->dat.protect |= 1;
1237 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY)*/
1238
1239 /* Extract the region-second table origin, offset, and
1240 length from the region-first table entry */
1241 rto = rte & REGTAB_TO;
1242 tf = (rte & REGTAB_TF) >> 6;
1243 tl = rte & REGTAB_TL;
1244
1245 /* Fall through to perform region-second translation */
1246
1247 /* Perform region-second translation */
1248 case TT_R2TABL:
1249
1250 /* Region-second translation exception if table offset is
1251 greater than high-order 2 bits of region-second index */
1252 if (tf > (rsx >> 12))
1253 goto reg_second_excp;
1254
1255 /* Region-second translation exception if table length is
1256 less than high-order 2 bits of region-second index */
1257 if (tl < (rsx >> 12))
1258 goto reg_second_excp;
1259
1260 /* Add the region-second index (with three low-order zeroes)
1261 to the region-second table origin, giving the address of
1262 the region-second table entry */
1263 rto += rsx;
1264
1265 /* Addressing exception if outside main storage */
1266 if (rto > regs->mainlim)
1267 goto address_excp;
1268
1269 /* Fetch region-second table entry from absolute storage.
1270 All bytes must be fetched concurrently as observed by
1271 other CPUs */
1272 rte = ARCH_DEP(fetch_doubleword_absolute) (rto, regs);
1273 // logmsg("r2te:%16.16" I64_FMT "X=>%16.16" I64_FMT "X\n",rto,rte);
1274
1275 /* Region-second translation exception if the bit 58 of
1276 the region-second table entry is set (region invalid) */
1277 if (rte & REGTAB_I)
1278 goto reg_second_invalid;
1279
1280 /* Translation specification exception if bits 60-61 of
1281 the region-second table entry do not indicate the
1282 correct type of region table */
1283 if ((rte & REGTAB_TT) != TT_R2TABL)
1284 goto tran_spec_excp;
1285
1286 #if defined(FEATURE_ENHANCED_DAT_FACILITY)
1287 if ((regs->CR_L(0) & CR0_ED)
1288 && (rte & REGTAB_P))
1289 regs->dat.protect |= 1;
1290 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY)*/
1291
1292 /* Extract the region-third table origin, offset, and
1293 length from the region-second table entry */
1294 rto = rte & REGTAB_TO;
1295 tf = (rte & REGTAB_TF) >> 6;
1296 tl = rte & REGTAB_TL;
1297
1298 /* Fall through to perform region-third translation */
1299
1300 /* Perform region-third translation */
1301 case TT_R3TABL:
1302
1303 /* Region-third translation exception if table offset is
1304 greater than high-order 2 bits of region-third index */
1305 if (tf > (rtx >> 12))
1306 goto reg_third_excp;
1307
1308 /* Region-third translation exception if table length is
1309 less than high-order 2 bits of region-third index */
1310 if (tl < (rtx >> 12))
1311 goto reg_third_excp;
1312
1313 /* Add the region-third index (with three low-order zeroes)
1314 to the region-third table origin, giving the address of
1315 the region-third table entry */
1316 rto += rtx;
1317
1318 /* Addressing exception if outside main storage */
1319 if (rto > regs->mainlim)
1320 goto address_excp;
1321
1322 /* Fetch region-third table entry from absolute storage.
1323 All bytes must be fetched concurrently as observed by
1324 other CPUs */
1325 rte = ARCH_DEP(fetch_doubleword_absolute) (rto, regs);
1326 // logmsg("r3te:%16.16" I64_FMT "X=>%16.16" I64_FMT "X\n",rto,rte);
1327
1328 /* Region-third translation exception if the bit 58 of
1329 the region-third table entry is set (region invalid) */
1330 if (rte & REGTAB_I)
1331 goto reg_third_invalid;
1332
1333 /* Translation specification exception if bits 60-61 of
1334 the region-third table entry do not indicate the
1335 correct type of region table */
1336 if ((rte & REGTAB_TT) != TT_R3TABL)
1337 goto tran_spec_excp;
1338
1339 #if defined(FEATURE_ENHANCED_DAT_FACILITY_2)
1340 if ((regs->CR_L(0) & CR0_ED))
1341 {
1342 /* Translation specification exception if the ASCE
1343 indicates a private space, and the region third
1344 table entry indicates a common region */
1345 if (regs->dat.private && (rte & REGTAB_CR))
1346 goto tran_spec_excp;
1347 }
1348 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY_2)*/
1349
1350 #if defined(FEATURE_ENHANCED_DAT_FACILITY)
1351 if ((regs->CR_L(0) & CR0_ED)
1352 && (rte & REGTAB_P))
1353 regs->dat.protect |= 1;
1354 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY)*/
1355
1356 #if defined(FEATURE_ENHANCED_DAT_FACILITY_2)
1357 if ((regs->CR_L(0) & CR0_ED)
1358 && (rte & REGTAB_FC))
1359 {
1360 /* For LPTEA instruction, return the address of the RTTE */
1361 if (unlikely(acctype & ACC_LPTEA))
1362 {
1363 regs->dat.raddr = rto | (regs->dat.protect ? 0x04 : 0);
1364 // logmsg("raddr:%16.16" I64_FMT "X cc=2\n",regs->dat.raddr);
1365 regs->dat.xcode = 0;
1366 cc = 2;
1367 return cc;
1368 } /* end if(ACCTYPE_LPTEA) */
1369
1370 /* Combine the region frame absolute address with the byte index
1371 of the virtual address to form the absolute address */
1372 regs->dat.raddr = (rte & REGTAB_RFAA) | (vaddr & ~REGTAB_RFAA);
1373 /* Fake 4K PFRA for TLB purposes */
1374 regs->dat.rpfra = ((rte & REGTAB_RFAA) | (vaddr & ~REGTAB_RFAA)) & PAGEFRAME_PAGEMASK;
1375
1376 // logmsg("raddr:%16.16" I64_FMT "X cc=0\n",regs->dat.raddr);
1377
1378 /* [3.11.4.2] Place the translated address in the TLB */
1379 if (!(acctype & ACC_NOTLB))
1380 {
1381 regs->tlb.TLB_ASD(tlbix) = regs->dat.asd;
1382 regs->tlb.TLB_VADDR(tlbix) = (vaddr & TLBID_PAGEMASK) | regs->tlbID;
1383 /* Fake 4K PTE for TLB purposes */
1384 regs->tlb.TLB_PTE(tlbix) = ((rte & REGTAB_RFAA) | (vaddr & ~REGTAB_RFAA)) & PAGEFRAME_PAGEMASK;
1385 regs->tlb.common[tlbix] = (rte & REGTAB_CR) ? 1 : 0;
1386 regs->tlb.protect[tlbix] = regs->dat.protect;
1387 regs->tlb.acc[tlbix] = 0;
1388 regs->tlb.main[tlbix] = NULL;
1389 }
1390
1391 /* Clear exception code and return with zero return code */
1392 regs->dat.xcode = 0;
1393 return 0;
1394 }
1395 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY_2)*/
1396
1397 /* Extract the segment table origin, offset, and
1398 length from the region-third table entry */
1399 sto = rte & REGTAB_TO;
1400 tf = (rte & REGTAB_TF) >> 6;
1401 tl = rte & REGTAB_TL;
1402
1403 /* Fall through to perform segment translation */
1404 } /* end switch(tt) */
1405
1406 /* Perform ESAME segment translation */
1407
1408 /* Add the segment index (with three low-order zeroes)
1409 to the segment table origin, giving the address of
1410 the segment table entry */
1411 sto += sx;
1412
1413 /* Segment translation exception if table offset is
1414 greater than high-order 2 bits of segment index */
1415 if (tf > (sx >> 12))
1416 goto seg_tran_length;
1417
1418 /* Segment translation exception if table length is
1419 less than high-order 2 bits of segment index */
1420 if (tl < (sx >> 12))
1421 goto seg_tran_length;
1422
1423 /* Addressing exception if outside real storage */
1424 if (sto > regs->mainlim)
1425 goto address_excp;
1426
1427 /* Fetch segment table entry from absolute storage. All bytes
1428 must be fetched concurrently as observed by other CPUs */
1429 ste = ARCH_DEP(fetch_doubleword_absolute) (sto, regs);
1430 // logmsg("ste:%16.16" I64_FMT "X=>%16.16" I64_FMT "X\n",sto,ste);
1431
1432 /* Segment translation exception if segment invalid */
1433 if (ste & ZSEGTAB_I)
1434 goto seg_tran_invalid;
1435
1436 /* Translation specification exception if bits 60-61 of
1437 the segment table entry do not indicate segment table */
1438 if ((ste & ZSEGTAB_TT) != TT_SEGTAB)
1439 goto tran_spec_excp;
1440
1441 /* Translation specification exception if the ASCE
1442 indicates a private space, and the segment table
1443 entry indicates a common segment */
1444 if (regs->dat.private && (ste & ZSEGTAB_C))
1445 goto tran_spec_excp;
1446
1447 #if defined(FEATURE_ENHANCED_DAT_FACILITY)
1448 if ((regs->CR_L(0) & CR0_ED)
1449 && (ste & ZSEGTAB_FC))
1450 {
1451
1452 /* Set protection indicator if page protection is indicated */
1453 if (ste & ZSEGTAB_P)
1454 regs->dat.protect |= 1;
1455
1456 /* For LPTEA instruction, return the address of the STE */
1457 if (unlikely(acctype & ACC_LPTEA))
1458 {
1459 regs->dat.raddr = sto | (regs->dat.protect ? 0x04 : 0);
1460 // logmsg("raddr:%16.16" I64_FMT "X cc=2\n",regs->dat.raddr);
1461 regs->dat.xcode = 0;
1462 cc = 2;
1463 return cc;
1464 } /* end if(ACCTYPE_LPTEA) */
1465
1466 /* Combine the page frame real address with the byte index
1467 of the virtual address to form the real address */
1468 regs->dat.raddr = (ste & ZSEGTAB_SFAA) | (vaddr & ~ZSEGTAB_SFAA);
1469 /* Fake 4K PFRA for TLB purposes */
1470 regs->dat.rpfra = ((ste & ZSEGTAB_SFAA) | (vaddr & ~ZSEGTAB_SFAA)) & PAGEFRAME_PAGEMASK;
1471
1472 // logmsg("raddr:%16.16" I64_FMT "X cc=0\n",regs->dat.raddr);
1473
1474 /* [3.11.4.2] Place the translated address in the TLB */
1475 if (!(acctype & ACC_NOTLB))
1476 {
1477 regs->tlb.TLB_ASD(tlbix) = regs->dat.asd;
1478 regs->tlb.TLB_VADDR(tlbix) = (vaddr & TLBID_PAGEMASK) | regs->tlbID;
1479 /* Fake 4K PTE for TLB purposes */
1480 regs->tlb.TLB_PTE(tlbix) = ((ste & ZSEGTAB_SFAA) | (vaddr & ~ZSEGTAB_SFAA)) & PAGEFRAME_PAGEMASK;
1481 regs->tlb.common[tlbix] = (ste & SEGTAB_COMMON) ? 1 : 0;
1482 regs->tlb.protect[tlbix] = regs->dat.protect;
1483 regs->tlb.acc[tlbix] = 0;
1484 regs->tlb.main[tlbix] = NULL;
1485 }
1486
1487 /* Clear exception code and return with zero return code */
1488 regs->dat.xcode = 0;
1489 return 0;
1490
1491 }
1492 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY)*/
1493
1494 /* Extract the page table origin from segment table entry */
1495 pto = ste & ZSEGTAB_PTO;
1496
1497 /* Perform ESAME page translation */
1498
1499 /* Add the page index (with three low-order zeroes) to the
1500 page table origin, giving address of page table entry */
1501 pto += px;
1502
1503 /* For LPTEA instruction, return the address of the PTE */
1504 if (acctype & ACC_LPTEA)
1505 {
1506 regs->dat.raddr = pto;
1507 regs->dat.xcode = 0;
1508 cc = (ste & ZSEGTAB_P) ? 1 : 0;
1509 #if defined(FEATURE_ENHANCED_DAT_FACILITY)
1510 if ((regs->CR_L(0) & CR0_ED)
1511 && regs->dat.protect)
1512 cc = 1;
1513 #endif /*defined(FEATURE_ENHANCED_DAT_FACILITY)*/
1514 return cc;
1515 } /* end if(ACCTYPE_LPTEA) */
1516
1517 /* Addressing exception if outside real storage */
1518 if (pto > regs->mainlim)
1519 goto address_excp;
1520
1521 /* Fetch the page table entry from absolute storage. All bytes
1522 must be fetched concurrently as observed by other CPUs */
1523 pte = ARCH_DEP(fetch_doubleword_absolute) (pto, regs);
1524 // logmsg("pte:%16.16" I64_FMT "X=>%16.16" I64_FMT "X\n",pto,pte);
1525
1526 /* Page translation exception if page invalid */
1527 if (pte & ZPGETAB_I)
1528 goto page_tran_invalid;
1529
1530 /* Check that all the reserved bits in the PTE are zero */
1531 if (pte & ZPGETAB_RESV)
1532 goto tran_spec_excp;
1533
1534 } /* end else(ASCE_R) */
1535
1536 /* Set protection indicator if page protection is indicated
1537 in either the segment table or the page table */
1538 if ((ste & ZSEGTAB_P) || (pte & ZPGETAB_P))
1539 regs->dat.protect |= 1;
1540
1541 /* [3.11.4.2] Place the translated address in the TLB */
1542 if (!(acctype & ACC_NOTLB))
1543 {
1544 regs->tlb.TLB_ASD(tlbix) = regs->dat.asd;
1545 regs->tlb.TLB_VADDR(tlbix) = (vaddr & TLBID_PAGEMASK) | regs->tlbID;
1546 regs->tlb.TLB_PTE(tlbix) = pte;
1547 regs->tlb.common[tlbix] = (ste & SEGTAB_COMMON) ? 1 : 0;
1548 regs->tlb.protect[tlbix] = regs->dat.protect;
1549 regs->tlb.acc[tlbix] = 0;
1550 regs->tlb.main[tlbix] = NULL;
1551 }
1552 }
1553
1554 if(!(acctype & ACC_PTE))
1555 {
1556 /* Combine the page frame real address with the byte index
1557 of the virtual address to form the real address */
1558 regs->dat.raddr = (pte & ZPGETAB_PFRA) | (vaddr & 0xFFF);
1559 regs->dat.rpfra = (pte & ZPGETAB_PFRA);
1560 }
1561 else
1562 regs->dat.raddr = pto;
1563 #endif /*defined(FEATURE_ESAME)*/
1564
1565 /* The following code is common to S/370, ESA/390, and ESAME */
1566
1567 /* Clear exception code and return with zero return code */
1568 regs->dat.xcode = 0;
1569 return 0;
1570
1571 /* Conditions which always cause program check, except
1572 when performing translation for the control panel */
1573 address_excp:
1574 // logmsg("dat.c: addressing exception: %8.8X %8.8X %4.4X %8.8X\n",
1575 // regs->CR(0),regs->dat.asd,pte,vaddr);
1576 regs->dat.xcode = PGM_ADDRESSING_EXCEPTION;
1577 goto tran_prog_check;
1578
1579 tran_spec_excp:
1580 #if defined(FEATURE_ESAME)
1581 // logmsg("dat.c: translation specification exception...\n");
1582 // logmsg(" pte = %16.16" I64_FMT "X, ste = %16.16" I64_FMT "X, rte=%16.16" I64_FMT "X\n",
1583 // pte, ste, rte);
1584 #else
1585 // logmsg("dat.c: translation specification exception...\n");
1586 // logmsg(" cr0=%8.8X ste=%8.8X pte=%4.4X vaddr=%8.8X\n",
1587 // regs->CR(0),ste,pte,vaddr);
1588 #endif
1589 regs->dat.xcode = PGM_TRANSLATION_SPECIFICATION_EXCEPTION;
1590 goto tran_prog_check;
1591
1592 #if defined(FEATURE_ESAME)
1593 spec_oper_excp:
1594 regs->dat.xcode = PGM_SPECIAL_OPERATION_EXCEPTION;
1595 goto tran_prog_check;
1596 #endif /*defined(FEATURE_ESAME)*/
1597
1598 tran_prog_check:
1599 #if defined(FEATURE_ENHANCED_MONITOR_FACILITY)
1600 /* No program interrupt for enhanced MC */
1601 if(acctype & ACC_ENH_MC)
1602 {
1603 cc = 5;
1604 return cc;
1605 }
1606 #endif /*defined(FEATURE_ENHANCED_MONITOR_FACILITY)*/
1607 regs->program_interrupt (regs, regs->dat.xcode);
1608
1609 /* Conditions which the caller may or may not program check */
1610 seg_tran_invalid:
1611 /* For LPTEA, return segment table entry address with cc 2 */
1612 if (acctype & ACC_LPTEA)
1613 {
1614 regs->dat.raddr = sto;
1615 cc = 2;
1616 return cc;
1617 } /* end if(ACCTYPE_LPTEA) */
1618
1619 /* Otherwise set translation exception code */
1620 regs->dat.xcode = PGM_SEGMENT_TRANSLATION_EXCEPTION;
1621 regs->dat.raddr = sto;
1622 cc = 1;
1623 goto tran_excp_addr;
1624
1625 page_tran_invalid:
1626 regs->dat.xcode = PGM_PAGE_TRANSLATION_EXCEPTION;
1627 regs->dat.raddr = pto;
1628 if(acctype & ACC_PTE) return 0;
1629 cc = 2;
1630 goto tran_excp_addr;
1631
1632 #if !defined(FEATURE_ESAME)
1633 page_tran_length:
1634 regs->dat.xcode = PGM_PAGE_TRANSLATION_EXCEPTION;
1635 regs->dat.raddr = pto;
1636 cc = 3;
1637 goto tran_excp_addr;
1638 #endif /*!defined(FEATURE_ESAME)*/
1639
1640 seg_tran_length:
1641 // logmsg("dat.c: segment translation exception due to segment length\n");
1642 // logmsg(" cr0=" F_RADR " sto=" F_RADR "\n",regs->CR(0),sto);
1643 regs->dat.xcode = PGM_SEGMENT_TRANSLATION_EXCEPTION;
1644 regs->dat.raddr = sto;
1645 cc = 3;
1646 goto tran_excp_addr;
1647
1648 tran_alet_excp:
1649 regs->excarid = arn;
1650 cc = (acctype & ACC_LPTEA) ? 3 : 4;
1651 return cc;
1652
1653 #if defined(FEATURE_ESAME)
1654 reg_first_invalid:
1655 /* For LPTEA, return region table entry address with cc 2 */
1656 if (acctype & ACC_LPTEA)
1657 {
1658 regs->dat.raddr = rto | (TT_R1TABL >> 2);
1659 cc = 2;
1660 return cc;
1661 } /* end if(ACCTYPE_LPTEA) */
1662
1663 /* Otherwise set translation exception code */
1664 goto reg_first_excp;
1665
1666 reg_second_invalid:
1667 /* For LPTEA, return region table entry address with cc 2 */
1668 if (acctype & ACC_LPTEA)
1669 {
1670 regs->dat.raddr = rto | (TT_R2TABL >> 2);
1671 cc = 2;
1672 return cc;
1673 } /* end if(ACCTYPE_LPTEA) */
1674
1675 /* Otherwise set translation exception code */
1676 goto reg_second_excp;
1677
1678 reg_third_invalid:
1679 /* For LPTEA, return region table entry address with cc 2 */
1680 if (acctype & ACC_LPTEA)
1681 {
1682 regs->dat.raddr = rto | (TT_R3TABL >> 2);
1683 cc = 2;
1684 return cc;
1685 } /* end if(ACCTYPE_LPTEA) */
1686
1687 /* Otherwise set translation exception code */
1688 goto reg_third_excp;
1689
1690 asce_type_excp:
1691 // logmsg("rfx = %4.4X, rsx %4.4X, rtx = %4.4X, tt = %1.1X\n",
1692 // rfx, rsx, rtx, tt);
1693 regs->dat.xcode = PGM_ASCE_TYPE_EXCEPTION;
1694 cc = 4;
1695 goto tran_excp_addr;
1696
1697 reg_first_excp:
1698 regs->dat.xcode = PGM_REGION_FIRST_TRANSLATION_EXCEPTION;
1699 cc = 4;
1700 goto tran_excp_addr;
1701
1702 reg_second_excp:
1703 regs->dat.xcode = PGM_REGION_SECOND_TRANSLATION_EXCEPTION;
1704 cc = 4;
1705 goto tran_excp_addr;
1706
1707 reg_third_excp:
1708 regs->dat.xcode = PGM_REGION_THIRD_TRANSLATION_EXCEPTION;
1709 cc = 4;
1710 goto tran_excp_addr;
1711 #endif /*defined(FEATURE_ESAME)*/
1712
1713 tran_excp_addr:
1714 /* For LPTEA instruction, return xcode with cc = 3 */
1715 if (acctype & ACC_LPTEA)
1716 return 3;
1717
1718 /* Set the translation exception address */
1719 regs->TEA = vaddr & PAGEFRAME_PAGEMASK;
1720
1721 /* Set the address space indication in the exception address */
1722 #if defined(FEATURE_ESAME)
1723 if(regs->dat.stid == TEA_ST_ARMODE)
1724 {
1725 if ((regs->dat.asd & ASCE_TO) == (regs->CR(1) & ASCE_TO))
1726 regs->TEA |= TEA_ST_PRIMARY;
1727 else if ((regs->dat.asd & ASCE_TO) == (regs->CR(7) & ASCE_TO))
1728 regs->TEA |= TEA_ST_SECNDRY;
1729 else if ((regs->dat.asd & ASCE_TO) == (regs->CR(13) & ASCE_TO))
1730 regs->TEA |= TEA_ST_HOME;
1731 else
1732 regs->TEA |= TEA_ST_ARMODE;
1733 }
1734 else
1735 regs->TEA |= regs->dat.stid;
1736 #else /*!defined(FEATURE_ESAME)*/
1737 if(regs->dat.stid == TEA_ST_ARMODE)
1738 {
1739 if ((regs->dat.asd & STD_STO) == (regs->CR(1) & STD_STO))
1740 regs->TEA |= TEA_ST_PRIMARY;
1741 else if ((regs->dat.asd & STD_STO) == (regs->CR(7) & STD_STO))
1742 regs->TEA |= TEA_ST_SECNDRY;
1743 else if ((regs->dat.asd & STD_STO) == (regs->CR(13) & STD_STO))
1744 regs->TEA |= TEA_ST_HOME;
1745 else
1746 regs->TEA |= TEA_ST_ARMODE;
1747 }
1748 else
1749 if((regs->dat.stid == TEA_ST_SECNDRY)
1750 && (PRIMARY_SPACE_MODE(&regs->psw)
1751 || SECONDARY_SPACE_MODE(&regs->psw)))
1752 regs->TEA |= TEA_ST_SECNDRY | TEA_SECADDR;
1753 else
1754 regs->TEA |= regs->dat.stid;
1755 #endif /*!defined(FEATURE_ESAME)*/
1756
1757 #if defined(FEATURE_ACCESS_EXCEPTION_FETCH_STORE_INDICATION) /*810*/
1758 /* Set the fetch/store indication bits 52-53 in the TEA */
1759 if (acctype & ACC_READ) {
1760 regs->TEA |= TEA_FETCH;
1761 } else if (acctype & (ACC_WRITE|ACC_CHECK)) {
1762 regs->TEA |= TEA_STORE;
1763 }
1764 #endif /*defined(FEATURE_ACCESS_EXCEPTION_FETCH_STORE_INDICATION)*/ /*810*/
1765
1766 /* Set the exception access identification */
1767 if (ACCESS_REGISTER_MODE(&regs->psw)
1768 || (SIE_ACTIVE(regs) && MULTIPLE_CONTROLLED_DATA_SPACE(regs->guestregs))
1769 )
1770 regs->excarid = arn > 15 ? 0 : arn;
1771
1772 /* Return condition code */
1773 return cc;
1774
1775 } /* end function translate_addr */
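/* Illustrative sketch (not compiled): how the address-space indication
   placed in the translation exception address above is chosen.  The
   failing ASD's table origin is compared against CR1, CR7 and CR13 to
   decide between primary, secondary, home, or AR-specified space.  The
   constants and the helper below are hypothetical stand-ins for the
   ASCE_TO / TEA_ST_* macros used by the real code. */
#if 0
#define X_TEA_ST_PRIMARY  0x0ULL   /* hypothetical space-indication codes */
#define X_TEA_ST_ARMODE   0x1ULL
#define X_TEA_ST_SECNDRY  0x2ULL
#define X_TEA_ST_HOME     0x3ULL

static unsigned long long x_tea_space (unsigned long long asd_to,
                                       unsigned long long cr1_to,
                                       unsigned long long cr7_to,
                                       unsigned long long cr13_to)
{
    if (asd_to == cr1_to)  return X_TEA_ST_PRIMARY;   /* matches CR1  */
    if (asd_to == cr7_to)  return X_TEA_ST_SECNDRY;   /* matches CR7  */
    if (asd_to == cr13_to) return X_TEA_ST_HOME;      /* matches CR13 */
    return X_TEA_ST_ARMODE;        /* AR-specified space, no match    */
}
#endif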
1776
1777
1778 /*-------------------------------------------------------------------*/
1779 /* Purge the translation lookaside buffer */
1780 /*-------------------------------------------------------------------*/
1781 _DAT_C_STATIC void ARCH_DEP(purge_tlb) (REGS *regs)
1782 {
1783 INVALIDATE_AIA(regs);
1784 if (((++regs->tlbID) & TLBID_BYTEMASK) == 0)
1785 {
1786 memset (&regs->tlb.vaddr, 0, TLBN * sizeof(DW));
1787 regs->tlbID = 1;
1788 }
1789 #if defined(_FEATURE_SIE)
1790 /* Also clear the guest registers in the SIE copy */
1791 if(regs->host && regs->guestregs)
1792 {
1793 INVALIDATE_AIA(regs->guestregs);
1794 if (((++regs->guestregs->tlbID) & TLBID_BYTEMASK) == 0)
1795 {
1796 memset (&regs->guestregs->tlb.vaddr, 0, TLBN * sizeof(DW));
1797 regs->guestregs->tlbID = 1;
1798 }
1799 }
1800 #endif /*defined(_FEATURE_SIE)*/
1801 } /* end function purge_tlb */
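/* Illustrative sketch (not compiled): the tlbID generation counter used
   by purge_tlb above.  Every TLB entry carries the ID of the generation
   in which it was created, so bumping the counter invalidates all
   entries at once; the vaddr array only has to be cleared when the
   8-bit counter wraps.  All names below are hypothetical stand-ins. */
#if 0
#include <string.h>

#define X_TLBN      256
#define X_BYTEMASK  0xFF

struct x_tlb
{
    unsigned long long vaddr[X_TLBN];  /* page address | generation ID */
    unsigned int       id;             /* current generation           */
};

static void x_purge_tlb (struct x_tlb *t)
{
    if (((++t->id) & X_BYTEMASK) == 0)
    {
        /* Rare case: the counter wrapped, so stale IDs could collide
           with the new generation; clear the array and restart at 1 */
        memset (t->vaddr, 0, sizeof(t->vaddr));
        t->id = 1;
    }
    /* Lookups only honour entries whose stored ID equals t->id */
}
#endif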
1802
1803
1804 /*-------------------------------------------------------------------*/
1805 /* Purge the translation lookaside buffer for all CPUs */
1806 /*-------------------------------------------------------------------*/
1807 _DAT_C_STATIC void ARCH_DEP(purge_tlb_all) ()
1808 {
1809 int i;
1810
1811 for (i = 0; i < MAX_CPU; i++)
1812 if (IS_CPU_ONLINE(i)
1813 && (sysblk.regs[i]->cpubit & sysblk.started_mask))
1814 ARCH_DEP(purge_tlb) (sysblk.regs[i]);
1815
1816 } /* end function purge_tlb_all */
1817
1818
1819 /*-------------------------------------------------------------------*/
1820 /* Purge translation lookaside buffer entries */
1821 /*-------------------------------------------------------------------*/
1822 _DAT_C_STATIC void ARCH_DEP(purge_tlbe) (REGS *regs, RADR pfra)
1823 {
1824 int i;
1825 RADR pte;
1826 RADR ptemask;
1827
1828 #if !defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)
1829 ptemask = ((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
1830 PAGETAB_PFRA_4K : PAGETAB_PFRA_2K;
1831 pte = ((pfra & 0xFFFFFF) >> 8) & ptemask;
1832 #endif
1833
1834 #if defined(FEATURE_S390_DAT)
1835 ptemask = PAGETAB_PFRA;
1836 pte = pfra & ptemask;
1837 #endif /* defined(FEATURE_S390_DAT) */
1838
1839 #if defined(FEATURE_ESAME)
1840 ptemask = (RADR)ZPGETAB_PFRA;
1841 pte = pfra & ptemask;
1842 #endif /* defined(FEATURE_ESAME) */
1843
1844 INVALIDATE_AIA(regs);
1845 for (i = 0; i < TLBN; i++)
1846 if ((regs->tlb.TLB_PTE(i) & ptemask) == pte)
1847 regs->tlb.TLB_VADDR(i) &= TLBID_PAGEMASK;
1848
1849 #if defined(_FEATURE_SIE)
1850 /* Also clear the guest registers in the SIE copy */
1851 if (regs->host && regs->guestregs)
1852 {
1853 INVALIDATE_AIA(regs->guestregs);
1854 for (i = 0; i < TLBN; i++)
1855 /************************************************************************** @PJJ */
1856 /* For DAT-off guests such as CMS, the guest TLB PTE entries in the SIE * @PJJ */
1857 /* copy do NOT actually contain the PTE, but rather the host primary * @PJJ */
1858 /* virtual address (both masked with TLBID_PAGEMASK). To decide whether * @PJJ */
1859 /* such a guest TLB entry needs to be cleared, the parallel host TLB PTE * @PJJ */
1860 /* entry must be checked as well, so the if-test that follows has been * @PJJ */
1861 /* expanded. Originally it was just : * @PJJ */
1862 /* * @PJJ */
1863 /* if ((regs->guestregs->tlb.TLB_PTE(i) & ptemask) == pte) * @PJJ */
1864 /* * @PJJ */
1865 /* and it is now expanded with the additional test as follows : * @PJJ */
1866 /* * @PJJ */
1867 /* (Peter J. Jansen, 26-Jul-2016) * @PJJ */
1868 /************************************************************************** @PJJ */
1869 if ((regs->guestregs->tlb.TLB_PTE(i) & ptemask) == pte || /* @PJJ */
1870 (regs->hostregs->tlb.TLB_PTE(i) & ptemask) == pte) /* @PJJ */
1871 regs->guestregs->tlb.TLB_VADDR(i) &= TLBID_PAGEMASK;
1872 }
1873 else
1874 /* For guests, clear any host entries */
1875 if (regs->guest)
1876 {
1877 INVALIDATE_AIA(regs->hostregs);
1878 for (i = 0; i < TLBN; i++)
1879 if ((regs->hostregs->tlb.TLB_PTE(i) & ptemask) == pte)
1880 regs->hostregs->tlb.TLB_VADDR(i) &= TLBID_PAGEMASK;
1881 }
1882 #endif /*defined(_FEATURE_SIE)*/
1883
1884 } /* end function purge_tlbe */
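/* Illustrative sketch (not compiled): the selective invalidation used by
   purge_tlbe above.  A matching entry is made stale not by erasing it but
   by masking its VADDR field back to the page bits, which strips the
   tlbID generation byte so the entry can no longer match the CPU's
   current ID.  All names below are hypothetical stand-ins. */
#if 0
#define X_TLBN      256
#define X_PAGEMASK  (~0xFFFULL)     /* keep page bits, drop the ID byte */

struct x_tlb2
{
    unsigned long long vaddr[X_TLBN];
    unsigned long long pte[X_TLBN];
};

static void x_purge_matching (struct x_tlb2 *t, unsigned long long pte,
                              unsigned long long ptemask)
{
    int i;
    for (i = 0; i < X_TLBN; i++)
        if ((t->pte[i] & ptemask) == pte)
            t->vaddr[i] &= X_PAGEMASK;   /* ID byte cleared: entry stale */
}
#endif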
1885
1886
1887 /*-------------------------------------------------------------------*/
1888 /* Purge translation lookaside buffer entries for all CPUs */
1889 /*-------------------------------------------------------------------*/
1890 _DAT_C_STATIC void ARCH_DEP(purge_tlbe_all) (RADR pfra)
1891 {
1892 int i;
1893
1894 for (i = 0; i < MAX_CPU; i++)
1895 if (IS_CPU_ONLINE(i)
1896 && (sysblk.regs[i]->cpubit & sysblk.started_mask))
1897 ARCH_DEP(purge_tlbe) (sysblk.regs[i], pfra);
1898
1899 } /* end function purge_tlbe_all */
1900
1901
1902 /*-------------------------------------------------------------------*/
1903 /* Invalidate all translation lookaside buffer entries */
1904 /*-------------------------------------------------------------------*/
1905 _DAT_C_STATIC void ARCH_DEP(invalidate_tlb) (REGS *regs, BYTE mask)
1906 {
1907 int i;
1908
1909 INVALIDATE_AIA(regs);
1910 if (mask == 0)
1911 memset(&regs->tlb.acc, 0, TLBN);
1912 else
1913 for (i = 0; i < TLBN; i++)
1914 if ((regs->tlb.TLB_VADDR(i) & TLBID_BYTEMASK) == regs->tlbID)
1915 regs->tlb.acc[i] &= mask;
1916
1917 #if defined(_FEATURE_SIE)
1918 /* Also invalidate the guest registers in the SIE copy */
1919 if(regs->host && regs->guestregs)
1920 {
1921 INVALIDATE_AIA(regs->guestregs);
1922 if (mask == 0)
1923 memset(&regs->guestregs->tlb.acc, 0, TLBN);
1924 else
1925 for (i = 0; i < TLBN; i++)
1926 if ((regs->guestregs->tlb.TLB_VADDR(i) & TLBID_BYTEMASK) == regs->guestregs->tlbID)
1927 regs->guestregs->tlb.acc[i] &= mask;
1928 }
1929 else
1930 /* Also invalidate the guest registers in the SIE copy */
1931 if(regs->guest)
1932 {
1933 INVALIDATE_AIA(regs->hostregs);
1934 if (mask == 0)
1935 memset(&regs->hostregs->tlb.acc, 0, TLBN);
1936 else
1937 for (i = 0; i < TLBN; i++)
1938 if ((regs->hostregs->tlb.TLB_VADDR(i) & TLBID_BYTEMASK) == regs->hostregs->tlbID)
1939 regs->hostregs->tlb.acc[i] &= mask;
1940 }
1941
1942 #endif /*defined(_FEATURE_SIE)*/
1943 } /* end function invalidate_tlb */
1944
1945
1946 /*-------------------------------------------------------------------*/
1947 /* Invalidate matching translation lookaside buffer entries */
1948 /* */
1949 /* Input: */
1950 /* main mainstore address to match on. This is mainstore */
1951 /* base plus absolute address (regs->mainstor+aaddr) */
1952 /* */
1953 /* This function is called by the SSK(E) instructions to purge */
1954 /* TLB entries that match the mainstore address. The "main" */
1955 /* field in the TLB contains the mainstore address plus an */
1956 /* XOR hash with effective address (regs->mainstor+aaddr^addr). */
1957 /* Before the compare can happen, the effective address from */
1958 /* the tlb (TLB_VADDR) must be XORed with the "main" field from */
1959 /* the tlb (removing hash). This is done using the MAINADDR() macro. */
1960 /* NOTES: */
1961 /* TLB_VADDR does not contain all the effective address bits and */
1962 /* must be created on-the-fly using the tlb index (i << shift). */
1963 /* TLB_VADDR also contains the tlbid, so the regs->tlbid is merged */
1964 /* with the main input variable before the search is begun. */
1965 /*-------------------------------------------------------------------*/
1966 _DAT_C_STATIC void ARCH_DEP(invalidate_tlbe) (REGS *regs, BYTE *main)
1967 {
1968 int i; /* index into TLB */
1969 int shift; /* Number of bits to shift */
1970 BYTE *mainwid; /* mainstore with tlbid */
1971
1972 if (main == NULL)
1973 {
1974 ARCH_DEP(invalidate_tlb)(regs, 0);
1975 return;
1976 }
1977
1978 mainwid = main + regs->tlbID;
1979
1980 INVALIDATE_AIA_MAIN(regs, main);
1981 shift = regs->arch_mode == ARCH_370 ? 11 : 12;
1982 for (i = 0; i < TLBN; i++)
1983 if (MAINADDR(regs->tlb.main[i],
1984 (regs->tlb.TLB_VADDR(i) | (i << shift)))
1985 == mainwid)
1986 {
1987 regs->tlb.acc[i] = 0;
1988 #if !defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)
1989 if ((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K)
1990 regs->tlb.acc[i^1] = 0;
1991 #endif
1992 }
1993
1994 #if defined(_FEATURE_SIE)
1995 /* Also clear the guest registers in the SIE copy */
1996 if (regs->host && regs->guestregs)
1997 {
1998 INVALIDATE_AIA_MAIN(regs->guestregs, main);
1999 shift = regs->guestregs->arch_mode == ARCH_370 ? 11 : 12;
2000 for (i = 0; i < TLBN; i++)
2001 if (MAINADDR(regs->guestregs->tlb.main[i],
2002 (regs->guestregs->tlb.TLB_VADDR(i) | (i << shift)))
2003 == mainwid)
2004 {
2005 regs->guestregs->tlb.acc[i] = 0;
2006 #if !defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)
2007 if ((regs->guestregs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K)
2008 regs->guestregs->tlb.acc[i^1] = 0;
2009 #endif
2010 }
2011 }
2012
2013 /* Also clear the host registers in the SIE copy */
2014 if (regs->guest)
2015 {
2016 INVALIDATE_AIA_MAIN(regs->hostregs, main);
2017 shift = regs->hostregs->arch_mode == ARCH_370 ? 11 : 12;
2018 for (i = 0; i < TLBN; i++)
2019 if (MAINADDR(regs->hostregs->tlb.main[i],
2020 (regs->hostregs->tlb.TLB_VADDR(i) | (i << shift)))
2021 == mainwid)
2022 {
2023 regs->hostregs->tlb.acc[i] = 0;
2024 #if !defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)
2025 if ((regs->hostregs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K)
2026 regs->hostregs->tlb.acc[i^1] = 0;
2027 #endif
2028 }
2029 }
2030
2031 #endif /*defined(_FEATURE_SIE)*/
2032
2033 } /* end function invalidate_tlbe */
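/* Illustrative sketch (not compiled): the XOR hashing of the TLB "main"
   field described in the block comment above invalidate_tlbe.  The entry
   stores the host pointer XORed with the effective address, and the
   lookup XORs the same effective address back in, so only the entry that
   was built for that address reproduces the original pointer.  The two
   helpers are hypothetical simplified stand-ins for the NEW_MAINADDR and
   MAINADDR macros. */
#if 0
#include <stdint.h>

static uintptr_t x_new_mainaddr (uint8_t *mainstor, uint64_t aaddr,
                                 uint64_t eaddr)
{
    /* hashed value stored in the TLB "main" field */
    return ((uintptr_t)mainstor + (uintptr_t)aaddr) ^ (uintptr_t)eaddr;
}

static uint8_t *x_mainaddr (uintptr_t hashed, uint64_t eaddr)
{
    /* XORing with the effective address removes the hash again */
    return (uint8_t *)(hashed ^ (uintptr_t)eaddr);
}
#endif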
2034
2035
2036 /*-------------------------------------------------------------------*/
2037 /* Invalidate page table entry */
2038 /* */
2039 /* Input: */
2040 /* ibyte 0x21=IPTE instruction, 0x59=IESBE instruction */
2041 /* r1 First operand register number */
2042 /* r2 Second operand register number */
2043 /* regs CPU register context */
2044 /* */
2045 /* This function is called by the IPTE and IESBE instructions. */
2046 /* It sets the PAGETAB_INVALID bit (for IPTE) or resets the */
2047 /* PAGETAB_ESVALID bit (for IESBE) in the page table entry */
2048 /* addressed by the page table origin in the R1 register and */
2049 /* the page index in the R2 register. It clears the TLB of */
2050 /* all entries whose PFRA matches the page table entry. */
2051 /* */
2052 /* invalidate_pte should be called with the intlock held and */
2053 /* SYNCHRONIZE_CPUS issued while intlock is held. */
2054 /* */
2055 /*-------------------------------------------------------------------*/
2056 _DAT_C_STATIC void ARCH_DEP(invalidate_pte) (BYTE ibyte, RADR op1,
2057 U32 op2, REGS *regs)
2058 {
2059 RADR raddr; /* Addr of page table entry */
2060 RADR pte;
2061 RADR pfra;
2062
2063 UNREFERENCED_370(ibyte);
2064
2065 #if !defined(FEATURE_S390_DAT) && !defined(FEATURE_ESAME)
2066 {
2067 /* Program check if translation format is invalid */
2068 if ((((regs->CR(0) & CR0_PAGE_SIZE) != CR0_PAGE_SZ_2K) &&
2069 ((regs->CR(0) & CR0_PAGE_SIZE) != CR0_PAGE_SZ_4K)) ||
2070 (((regs->CR(0) & CR0_SEG_SIZE) != CR0_SEG_SZ_64K) &&
2071 ((regs->CR(0) & CR0_SEG_SIZE) != CR0_SEG_SZ_1M)))
2072 regs->program_interrupt (regs,
2073 PGM_TRANSLATION_SPECIFICATION_EXCEPTION);
2074
2075 /* Combine the page table origin in the R1 register with
2076 the page index in the R2 register, ignoring carry, to
2077 form the 31-bit real address of the page table entry */
2078 raddr = (op1 & SEGTAB_370_PTO)
2079 + (((regs->CR(0) & CR0_SEG_SIZE) == CR0_SEG_SZ_1M) ?
2080 (((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
2081 ((op2 & 0x000FF000) >> 11) :
2082 ((op2 & 0x000FF800) >> 10)) :
2083 (((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
2084 ((op2 & 0x0000F000) >> 11) :
2085 ((op2 & 0x0000F800) >> 10)));
2086 raddr &= 0x00FFFFFF;
2087
2088 /* Fetch the page table entry from real storage, subject
2089 to normal storage protection mechanisms */
2090 pte = ARCH_DEP(vfetch2) ( raddr, USE_REAL_ADDR, regs );
2091
2092 /* Set the page invalid bit in the page table entry,
2093 again subject to storage protection mechanisms */
2094 // /*debug*/ logmsg("dat.c: IPTE issued for entry %4.4X at %8.8X...\n"
2095 // " page table %8.8X, page index %8.8X, cr0 %8.8X\n",
2096 // pte, raddr, regs->GR_L(r1), regs->GR_L(r2), regs->CR(0));
2097 if ((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_2K)
2098 pte |= PAGETAB_INV_2K;
2099 else
2100 pte |= PAGETAB_INV_4K;
2101 ARCH_DEP(vstore2) ( pte, raddr, USE_REAL_ADDR, regs );
2102 pfra = ((regs->CR(0) & CR0_PAGE_SIZE) == CR0_PAGE_SZ_4K) ?
2103 #if defined(FEATURE_S370E_EXTENDED_ADDRESSING)
2104 (((U32)pte & PAGETAB_EA_4K) << 23) |
2105 #endif
2106 (((U32)pte & PAGETAB_PFRA_4K) << 8) :
2107 (((U32)pte & PAGETAB_PFRA_2K) << 8);
2108 }
2109 #elif defined(FEATURE_S390_DAT)
2110 {
2111 /* Program check if translation format is invalid */
2112 if ((regs->CR(0) & CR0_TRAN_FMT) != CR0_TRAN_ESA390)
2113 regs->program_interrupt (regs,
2114 PGM_TRANSLATION_SPECIFICATION_EXCEPTION);
2115
2116 /* Combine the page table origin in the R1 register with
2117 the page index in the R2 register, ignoring carry, to
2118 form the 31-bit real address of the page table entry */
2119 raddr = (op1 & SEGTAB_PTO)
2120 + ((op2 & 0x000FF000) >> 10);
2121 raddr &= 0x7FFFFFFF;
2122
2123 /* Fetch the page table entry from real storage, subject
2124 to normal storage protection mechanisms */
2125 pte = ARCH_DEP(vfetch4) ( raddr, USE_REAL_ADDR, regs );
2126
2127 /* Set the page invalid bit in the page table entry,
2128 again subject to storage protection mechanisms */
2129 #if defined(FEATURE_MOVE_PAGE_FACILITY_2) && defined(FEATURE_EXPANDED_STORAGE)
2130 if(ibyte == 0x59)
2131 pte &= ~PAGETAB_ESVALID;
2132 else
2133 #endif /*defined(FEATURE_MOVE_PAGE_FACILITY_2)*/
2134 pte |= PAGETAB_INVALID;
2135 ARCH_DEP(vstore4) ( pte, raddr, USE_REAL_ADDR, regs );
2136 pfra = pte & PAGETAB_PFRA;
2137 }
2138 #else /*defined(FEATURE_ESAME)*/
2139 {
2140 /* Combine the page table origin in the R1 register with
2141 the page index in the R2 register, ignoring carry, to
2142 form the 64-bit real address of the page table entry */
2143 raddr = (op1 & ZSEGTAB_PTO)
2144 + ((op2 & 0x000FF000) >> 9);
2145
2146 #if defined(MODEL_DEPENDENT)
2147 raddr = APPLY_PREFIXING (raddr, regs->PX);
2148 #endif /*defined(MODEL_DEPENDENT)*/
2149
2150 /* Fetch the page table entry from real storage, subject
2151 to normal storage protection mechanisms */
2152 pte = ARCH_DEP(vfetch8) ( raddr, USE_REAL_ADDR, regs );
2153
2154 /* Set the page invalid bit in the page table entry,
2155 again subject to storage protection mechanisms */
2156 #if defined(FEATURE_MOVE_PAGE_FACILITY_2) && defined(FEATURE_EXPANDED_STORAGE)
2157 if(ibyte == 0x59)
2158 pte &= ~ZPGETAB_ESVALID;
2159 else
2160 #endif /*defined(FEATURE_MOVE_PAGE_FACILITY_2)*/
2161 pte |= ZPGETAB_I;
2162 ARCH_DEP(vstore8) ( pte, raddr, USE_REAL_ADDR, regs );
2163 pfra = pte & ZPGETAB_PFRA;
2164 }
2165 #endif /*defined(FEATURE_ESAME)*/
2166
2167 /* Invalidate TLB entries */
2168 ARCH_DEP(purge_tlbe_all) (pfra);
2169
2170 } /* end function invalidate_pte */
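/* Illustrative sketch (not compiled): the ESAME page-table-entry address
   arithmetic used by invalidate_pte above.  The page index occupies
   virtual-address bits 44-51 (mask 0x000FF000); each ESAME PTE is eight
   bytes, so shifting the masked value right by 12 and then left by 3,
   i.e. right by 9, yields the byte offset of the PTE within the page
   table.  x_pte_addr() is a hypothetical helper. */
#if 0
#include <stdint.h>

static uint64_t x_pte_addr (uint64_t pto, uint64_t vaddr)
{
    uint64_t px_offset = (vaddr & 0x000FF000ULL) >> 9;   /* (PX >> 12) << 3 */
    return pto + px_offset;    /* pto already masked to the table origin */
}
#endif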
2171
2172 #endif /*!defined(OPTION_NO_INLINE_DAT) || defined(_DAT_C) */
2173
2174
2175 #if defined(FEATURE_PER2)
2176 /*-------------------------------------------------------------------*/
2177 /* Check for a storage alteration PER2 event */
2178 /* Returns 1 if true, 0 if false */
2179 /*-------------------------------------------------------------------*/
2180 static inline int ARCH_DEP(check_sa_per2) (int arn, int acctype, REGS *regs)
2181 {
2182 UNREFERENCED(acctype);
2183 if((regs->dat.asd & SAEVENT_BIT) || !(regs->CR(9) & CR9_SAC))
2184 {
2185 regs->peraid = arn > 0 && arn < 16 ? arn : 0;
2186 regs->perc |= regs->dat.stid;
2187 return 1;
2188 }
2189 return 0;
2190 } /* end function check_sa_per2 */
2191 #endif /*defined(FEATURE_PER2)*/
2192
2193
2194 #if !defined(OPTION_NO_INLINE_LOGICAL) || defined(_DAT_C)
2195 /*-------------------------------------------------------------------*/
2196 /* Convert logical address to absolute address and check protection */
2197 /* */
2198 /* Input: */
2199 /* addr Logical address to be translated */
2200 /* arn Access register number (or USE_REAL_ADDR, */
2201 /* USE_PRIMARY_SPACE, USE_SECONDARY_SPACE) */
2202 /* regs CPU register context */
2203 /* acctype Type of access requested: READ, WRITE, or instfetch */
2204 /* akey Bits 0-3=access key, 4-7=zeroes */
2205 /* len Length of data access for PER SA purpose */
2206 /* Returns: */
2207 /* Absolute storage address. */
2208 /* */
2209 /* If the PSW indicates DAT-off, or if the access register */
2210 /* number parameter is the special value USE_REAL_ADDR, */
2211 /* then the addr parameter is treated as a real address. */
2212 /* Otherwise addr is a virtual address, so dynamic address */
2213 /* translation is called to convert it to a real address. */
2214 /* Prefixing is then applied to convert the real address to */
2215 /* an absolute address, and then low-address protection, */
2216 /* access-list controlled protection, page protection, and */
2217 /* key controlled protection checks are applied to the address. */
2218 /* If successful, the reference and change bits of the storage */
2219 /* key are updated, and the absolute address is returned. */
2220 /* */
2221 /* If the logical address causes an addressing, protection, */
2222 /* or translation exception then a program check is generated */
2223 /* and the function does not return. */
2224 /*-------------------------------------------------------------------*/
2225 _LOGICAL_C_STATIC BYTE *ARCH_DEP(logical_to_main_l) (VADR addr, int arn,
2226 REGS *regs, int acctype, BYTE akey,
2227 size_t len)
2228 {
2229 RADR aaddr; /* Absolute address */
2230 RADR apfra; /* Abs page frame address */
2231 int ix = TLBIX(addr); /* TLB index */
2232
2233 /* Convert logical address to real address */
2234 if ( (REAL_MODE(&regs->psw) || arn == USE_REAL_ADDR)
2235 #if defined(FEATURE_INTERPRETIVE_EXECUTION)
2236 /* Under SIE guest real is always host primary, regardless
2237 of the DAT mode */
2238 && !(regs->sie_active
2239 #if !defined(_FEATURE_MULTIPLE_CONTROLLED_DATA_SPACE)
2240 && arn == USE_PRIMARY_SPACE
2241 #else
2242 // && ( (arn == USE_PRIMARY_SPACE)
2243 // || SIE_STATB(regs->guestregs, MX, XC) )
2244 #endif /*defined(_FEATURE_MULTIPLE_CONTROLLED_DATA_SPACE)*/
2245 )
2246 #endif /*defined(FEATURE_INTERPRETIVE_EXECUTION)*/
2247 )
2248 {
2249 regs->dat.private = regs->dat.protect = 0;
2250 regs->dat.raddr = addr;
2251 regs->dat.rpfra = addr & PAGEFRAME_PAGEMASK;
2252
2253 /* Setup `real' TLB entry (for MADDR) */
2254 regs->tlb.TLB_ASD(ix) = TLB_REAL_ASD;
2255 regs->tlb.TLB_VADDR(ix) = (addr & TLBID_PAGEMASK) | regs->tlbID;
2256 regs->tlb.TLB_PTE(ix) = addr & TLBID_PAGEMASK;
2257 regs->tlb.acc[ix] =
2258 regs->tlb.common[ix] =
2259 regs->tlb.protect[ix] = 0;
2260 }
2261 else {
2262 if (ARCH_DEP(translate_addr) (addr, arn, regs, acctype))
2263 goto vabs_prog_check;
2264 }
2265
2266 if (regs->dat.protect
2267 && (acctype & (ACC_WRITE|ACC_CHECK)))
2268 goto vabs_prot_excp;
2269
2270 /* Convert real address to absolute address */
2271 regs->dat.aaddr = aaddr = APPLY_PREFIXING (regs->dat.raddr, regs->PX);
2272 apfra=APPLY_PREFIXING(regs->dat.rpfra,regs->PX);
2273
2274 /* Program check if absolute address is outside main storage */
2275 if (regs->dat.aaddr > regs->mainlim)
2276 goto vabs_addr_excp;
2277
2278 #if defined(_FEATURE_SIE)
2279 if(SIE_MODE(regs)) regs->hostregs->dat.protect = 0;
2280 if(SIE_MODE(regs) && !regs->sie_pref)
2281 {
2282
2283 if (SIE_TRANSLATE_ADDR (regs->sie_mso + regs->dat.aaddr,
2284 (arn > 0 && arn < 16 && MULTIPLE_CONTROLLED_DATA_SPACE(regs)) ? arn : USE_PRIMARY_SPACE,
2285 regs->hostregs, ACCTYPE_SIE))
2286 (regs->hostregs->program_interrupt) (regs->hostregs, regs->hostregs->dat.xcode);
2287
2288 regs->dat.protect |= regs->hostregs->dat.protect;
2289 regs->tlb.protect[ix] |= regs->hostregs->dat.protect;
2290
2291 if ( REAL_MODE(&regs->psw) || (arn == USE_REAL_ADDR) )
2292 regs->tlb.TLB_PTE(ix) = addr & TLBID_PAGEMASK;
2293
2294 /* Indicate a host real space entry for a XC dataspace */
2295 if (arn > 0 && arn < 16 && MULTIPLE_CONTROLLED_DATA_SPACE(regs))
2296 {
2297 regs->tlb.TLB_ASD(ix) = regs->dat.asd;
2298 /* Ensure that the private bit is percolated to the guest such that LAP is applied correctly */
2299 regs->dat.private = regs->hostregs->dat.private;
2300
2301 /* Build tlb entry of XC dataspace */
2302 regs->dat.asd = regs->hostregs->dat.asd ^ TLB_HOST_ASD;
2303 regs->CR(CR_ALB_OFFSET + arn) = regs->dat.asd;
2304 regs->aea_ar[arn] = CR_ALB_OFFSET + arn;
2305 regs->aea_common[CR_ALB_OFFSET + arn] = (regs->dat.asd & ASD_PRIVATE) == 0;
2306 regs->aea_aleprot[arn] = regs->hostregs->dat.protect & 2;
2307 }
2308
2309 /* Convert host real address to host absolute address */
2310 regs->hostregs->dat.aaddr = aaddr =
2311 APPLY_PREFIXING (regs->hostregs->dat.raddr, regs->hostregs->PX);
2312 apfra = APPLY_PREFIXING(regs->hostregs->dat.rpfra, regs->hostregs->PX);
2313
2314 if(regs->hostregs->dat.aaddr > regs->hostregs->mainlim)
2315 goto vabs_addr_excp;
2316 /* Take into account SIE guests with a 2K page scheme
2317 because the SIE host may be operating with a 4K page
2318 system */
2319 #if defined(FEATURE_2K_STORAGE_KEYS)
2320 if((addr & PAGEFRAME_PAGEMASK) & 0x800)
2321 {
2322 apfra|=0x800;
2323 }
2324 #endif
2325 }
2326 #endif /*defined(_FEATURE_SIE)*/
2327
2328 /* Check protection and set reference and change bits */
2329 regs->dat.storkey = &(STORAGE_KEY(aaddr, regs));
2330
2331 #if defined(_FEATURE_SIE)
2332 /* Do not apply host key access when SIE fetches/stores data */
2333 if (unlikely(SIE_ACTIVE(regs)))
2334 return regs->mainstor + aaddr;
2335 #endif /*defined(_FEATURE_SIE)*/
2336
2337 if (likely(acctype & ACC_READ))
2338 {
2339 /* Program check if fetch protected location */
2340 if (unlikely(ARCH_DEP(is_fetch_protected) (addr, *regs->dat.storkey, akey, regs)))
2341 {
2342 if (SIE_MODE(regs)) regs->hostregs->dat.protect = 0;
2343 goto vabs_prot_excp;
2344 }
2345
2346 /* Set the reference bit in the storage key */
2347 *regs->dat.storkey |= STORKEY_REF;
2348
2349 /* Update accelerated lookup TLB fields */
2350 regs->tlb.storkey[ix] = regs->dat.storkey;
2351 regs->tlb.skey[ix] = *regs->dat.storkey & STORKEY_KEY;
2352 regs->tlb.acc[ix] = ACC_READ;
2353 regs->tlb.main[ix] = NEW_MAINADDR (regs, addr, apfra);
2354
2355 }
2356 else /* if(acctype & (ACC_WRITE|ACC_CHECK)) */
2357 {
2358 /* Program check if store protected location */
2359 if (unlikely(ARCH_DEP(is_store_protected) (addr, *regs->dat.storkey, akey, regs)))
2360 {
2361 if (SIE_MODE(regs)) regs->hostregs->dat.protect = 0;
2362 goto vabs_prot_excp;
2363 }
2364 if (SIE_MODE(regs) && regs->hostregs->dat.protect)
2365 goto vabs_prot_excp;
2366
2367 /* Set the reference and change bits in the storage key */
2368 if (acctype & ACC_WRITE)
2369 *regs->dat.storkey |= (STORKEY_REF | STORKEY_CHANGE);
2370
2371 /* Update accelerated lookup TLB fields */
2372 regs->tlb.storkey[ix] = regs->dat.storkey;
2373 regs->tlb.skey[ix] = *regs->dat.storkey & STORKEY_KEY;
2374 regs->tlb.acc[ix] = (addr >= PSA_SIZE || regs->dat.private)
2375 ? (ACC_READ|ACC_CHECK|acctype)
2376 : ACC_READ;
2377 regs->tlb.main[ix] = NEW_MAINADDR (regs, addr, apfra);
2378
2379 #if defined(FEATURE_PER)
2380 if (EN_IC_PER_SA(regs))
2381 {
2382 regs->tlb.acc[ix] = ACC_READ;
2383 if (arn != USE_REAL_ADDR
2384 #if defined(FEATURE_PER2)
2385 && ( REAL_MODE(&regs->psw) ||
2386 ARCH_DEP(check_sa_per2) (arn, acctype, regs)
2387 )
2388 #endif /*defined(FEATURE_PER2)*/
2389 /* Check the range altered enters the SA PER range */
2390 && PER_RANGE_CHECK2(addr,addr+(len-1),regs->CR(10),regs->CR(11))
2391 )
2392 ON_IC_PER_SA(regs);
2393 }
2394 #endif /*defined(FEATURE_PER)*/
2395 } /* acctype & ACC_WRITE|CHECK */
2396
2397 /* Return mainstor address */
2398 return regs->mainstor + aaddr;
2399
2400 vabs_addr_excp:
2401 regs->program_interrupt (regs, PGM_ADDRESSING_EXCEPTION);
2402
2403 vabs_prot_excp:
2404 #ifdef FEATURE_SUPPRESSION_ON_PROTECTION
2405 regs->TEA = addr & STORAGE_KEY_PAGEMASK;
2406 if (regs->dat.protect && (acctype & (ACC_WRITE|ACC_CHECK)) )
2407 {
2408 regs->TEA |= TEA_PROT_AP;
2409 #if defined(FEATURE_ESAME)
2410 if (regs->dat.protect & 2)
2411 regs->TEA |= TEA_PROT_A;
2412 #endif /*defined(FEATURE_ESAME)*/
2413 }
2414 regs->TEA |= regs->dat.stid;
2415 regs->excarid = (arn > 0 && arn < 16 ? arn : 0);
2416 #endif /*FEATURE_SUPPRESSION_ON_PROTECTION*/
2417
2418 #if defined(_FEATURE_PROTECTION_INTERCEPTION_CONTROL)
2419 if(SIE_MODE(regs) && regs->hostregs->dat.protect)
2420 {
2421 #ifdef FEATURE_SUPPRESSION_ON_PROTECTION
2422 regs->hostregs->TEA = regs->TEA;
2423 regs->hostregs->excarid = regs->excarid;
2424 #endif /*FEATURE_SUPPRESSION_ON_PROTECTION*/
2425 (regs->hostregs->program_interrupt) (regs->hostregs, PGM_PROTECTION_EXCEPTION);
2426 }
2427 else
2428 #endif /*defined(_FEATURE_PROTECTION_INTERCEPTION_CONTROL)*/
2429 regs->program_interrupt (regs, PGM_PROTECTION_EXCEPTION);
2430
2431 vabs_prog_check:
2432 regs->program_interrupt (regs, regs->dat.xcode);
2433
2434 return NULL; /* prevent warning from compiler */
2435 } /* end function ARCH_DEP(logical_to_main_l) */
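/* Illustrative sketch (not compiled): the real-to-absolute prefixing step
   performed by APPLY_PREFIXING in logical_to_main_l above, shown here for
   ESA/390-style 4K prefixing (z/Architecture exchanges 8K blocks instead).
   The block at real address 0 and the block named by the prefix register
   trade places; every other address passes through unchanged.
   x_apply_prefixing() is a hypothetical simplified helper. */
#if 0
#include <stdint.h>

static uint64_t x_apply_prefixing (uint64_t raddr, uint64_t prefix)
{
    uint64_t block = raddr & ~0xFFFULL;

    if (block == 0)                        /* real block 0 -> prefix block */
        return (prefix & ~0xFFFULL) | (raddr & 0xFFFULL);

    if (block == (prefix & ~0xFFFULL))     /* prefix block -> real block 0 */
        return raddr & 0xFFFULL;

    return raddr;                          /* everything else is unchanged */
}
#endif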
2436
2437 /* Original logical_to_main() for compatibility purposes */
2438 _LOGICAL_C_STATIC BYTE *ARCH_DEP(logical_to_main) (VADR addr, int arn,
2439 REGS *regs, int acctype, BYTE akey)
2440 {
2441 return ARCH_DEP(logical_to_main_l)(addr,arn,regs,acctype,akey,1);
2442 }
2443
2444 #endif /*!defined(OPTION_NO_INLINE_LOGICAL) || defined(_DAT_C) */
2445
2446 /* end of DAT.H */
2447