1 /* pp_hot.c 2 * 3 * Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 4 * 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others 5 * 6 * You may distribute under the terms of either the GNU General Public 7 * License or the Artistic License, as specified in the README file. 8 * 9 */ 10 11 /* 12 * Then he heard Merry change the note, and up went the Horn-cry of Buckland, 13 * shaking the air. 14 * 15 * Awake! Awake! Fear, Fire, Foes! Awake! 16 * Fire, Foes! Awake! 17 * 18 * [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"] 19 */ 20 21 /* This file contains 'hot' pp ("push/pop") functions that 22 * execute the opcodes that make up a perl program. A typical pp function 23 * expects to find its arguments on the stack, and usually pushes its 24 * results onto the stack, hence the 'pp' terminology. Each OP structure 25 * contains a pointer to the relevant pp_foo() function. 26 * 27 * By 'hot', we mean common ops whose execution speed is critical. 28 * By gathering them together into a single file, we encourage 29 * CPU cache hits on hot code. Also it could be taken as a warning not to 30 * change any code in this file unless you're sure it won't affect 31 * performance. 32 */ 33 34 #include "EXTERN.h" 35 #define PERL_IN_PP_HOT_C 36 #include "perl.h" 37 38 /* Hot code. */ 39 40 PP(pp_const) 41 { 42 dSP; 43 XPUSHs(cSVOP_sv); 44 RETURN; 45 } 46 47 PP(pp_nextstate) 48 { 49 PL_curcop = (COP*)PL_op; 50 TAINT_NOT; /* Each statement is presumed innocent */ 51 PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp; 52 FREETMPS; 53 PERL_ASYNC_CHECK(); 54 return NORMAL; 55 } 56 57 PP(pp_gvsv) 58 { 59 dSP; 60 EXTEND(SP,1); 61 if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO)) 62 PUSHs(save_scalar(cGVOP_gv)); 63 else 64 PUSHs(GvSVn(cGVOP_gv)); 65 RETURN; 66 } 67 68 69 /* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */ 70 71 PP(pp_null) 72 { 73 return NORMAL; 74 } 75 76 /* This is sometimes called directly by pp_coreargs, pp_grepstart and 77 amagic_call. */ 78 PP(pp_pushmark) 79 { 80 PUSHMARK(PL_stack_sp); 81 return NORMAL; 82 } 83 84 PP(pp_stringify) 85 { 86 dSP; dTARGET; 87 SV * const sv = TOPs; 88 SETs(TARG); 89 sv_copypv(TARG, sv); 90 SvSETMAGIC(TARG); 91 /* no PUTBACK, SETs doesn't inc/dec SP */ 92 return NORMAL; 93 } 94 95 PP(pp_gv) 96 { 97 dSP; 98 XPUSHs(MUTABLE_SV(cGVOP_gv)); 99 RETURN; 100 } 101 102 103 /* also used for: pp_andassign() */ 104 105 PP(pp_and) 106 { 107 PERL_ASYNC_CHECK(); 108 { 109 /* SP is not used to remove a variable that is saved across the 110 sv_2bool_flags call in SvTRUE_NN, if a RISC/CISC or low/high machine 111 register or load/store vs direct mem ops macro is introduced, this 112 should be a define block between direct PL_stack_sp and dSP operations, 113 presently, using PL_stack_sp is bias towards CISC cpus */ 114 SV * const sv = *PL_stack_sp; 115 if (!SvTRUE_NN(sv)) 116 return NORMAL; 117 else { 118 if (PL_op->op_type == OP_AND) 119 --PL_stack_sp; 120 return cLOGOP->op_other; 121 } 122 } 123 } 124 125 PP(pp_sassign) 126 { 127 dSP; 128 /* sassign keeps its args in the optree traditionally backwards. 129 So we pop them differently. 
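       For example (purely illustrative): in '$x = $y', the rhs $y is pushed
       first and the lvalue $x last, so the POPs below yields the assignment
       target ('left') while TOPs is the value being assigned ('right').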
*/
    SV *left = POPs; SV *right = TOPs;

    if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
        SV * const temp = left;
        left = right; right = temp;
    }
    assert(TAINTING_get || !TAINT_get);
    if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
        TAINT_NOT;
    if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
        /* *foo =\&bar */
        SV * const cv = SvRV(right);
        const U32 cv_type = SvTYPE(cv);
        const bool is_gv = isGV_with_GP(left);
        const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;

        if (!got_coderef) {
            assert(SvROK(cv));
        }

        /* Can do the optimisation if left (LVALUE) is not a typeglob,
           right (RVALUE) is a reference to something, and we're in void
           context. */
        if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
            /* Is the target symbol table currently empty? */
            GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
            if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
                /* Good. Create a new proxy constant subroutine in the target.
                   The gv becomes a(nother) reference to the constant. */
                SV *const value = SvRV(cv);

                SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
                SvPCS_IMPORTED_on(gv);
                SvRV_set(gv, value);
                SvREFCNT_inc_simple_void(value);
                SETs(left);
                RETURN;
            }
        }

        /* Need to fix things up. */
        if (!is_gv) {
            /* Need to fix GV. */
            left = MUTABLE_SV(gv_fetchsv_nomg(left,GV_ADD, SVt_PVGV));
        }

        if (!got_coderef) {
            /* We've been returned a constant rather than a full subroutine,
               but they expect a subroutine reference to apply. */
            if (SvROK(cv)) {
                ENTER_with_name("sassign_coderef");
                SvREFCNT_inc_void(SvRV(cv));
                /* newCONSTSUB takes a reference count on the passed in SV
                   from us. We set the name to NULL, otherwise we get into
                   all sorts of fun as the reference to our new sub is
                   donated to the GV that we're about to assign to.
                */
                SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
                                                       SvRV(cv))));
                SvREFCNT_dec_NN(cv);
                LEAVE_with_name("sassign_coderef");
            } else {
                /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
                   is that
                   First:  ops for \&{"BONK"}; return us the constant in the
                           symbol table
                   Second: ops for *{"BONK"} cause that symbol table entry
                           (and our reference to it) to be upgraded from RV
                           to a typeglob
                   Thirdly: We get here. cv is actually PVGV now, and its
                            GvCV() is actually the subroutine we're looking for

                   So change the reference so that it points to the subroutine
                   of that typeglob, as that's what they were after all along.
                */
                GV *const upgraded = MUTABLE_GV(cv);
                CV *const source = GvCV(upgraded);

                assert(source);
                assert(CvFLAGS(source) & CVf_CONST);

                SvREFCNT_inc_simple_void_NN(source);
                SvREFCNT_dec_NN(upgraded);
                SvRV_set(right, MUTABLE_SV(source));
            }
        }

    }
    if (
      UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
      (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
    )
        Perl_warner(aTHX_
            packWARN(WARN_MISC), "Useless assignment to a temporary"
        );
    SvSetMagicSV(left, right);
    SETs(left);
    RETURN;
}

PP(pp_cond_expr)
{
    dSP;
    SV *sv;

    PERL_ASYNC_CHECK();
    sv = POPs;
    RETURNOP(SvTRUE_NN(sv) ?
cLOGOP->op_other : cLOGOP->op_next); 239 } 240 241 PP(pp_unstack) 242 { 243 PERL_CONTEXT *cx; 244 PERL_ASYNC_CHECK(); 245 TAINT_NOT; /* Each statement is presumed innocent */ 246 cx = CX_CUR(); 247 PL_stack_sp = PL_stack_base + cx->blk_oldsp; 248 FREETMPS; 249 if (!(PL_op->op_flags & OPf_SPECIAL)) { 250 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx)); 251 CX_LEAVE_SCOPE(cx); 252 } 253 return NORMAL; 254 } 255 256 257 /* The main body of pp_concat, not including the magic/overload and 258 * stack handling. 259 * It does targ = left . right. 260 * Moved into a separate function so that pp_multiconcat() can use it 261 * too. 262 */ 263 264 PERL_STATIC_INLINE void 265 S_do_concat(pTHX_ SV *left, SV *right, SV *targ, U8 targmy) 266 { 267 bool lbyte; 268 STRLEN rlen; 269 const char *rpv = NULL; 270 bool rbyte = FALSE; 271 bool rcopied = FALSE; 272 273 if (TARG == right && right != left) { /* $r = $l.$r */ 274 rpv = SvPV_nomg_const(right, rlen); 275 rbyte = !DO_UTF8(right); 276 right = newSVpvn_flags(rpv, rlen, SVs_TEMP); 277 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */ 278 rcopied = TRUE; 279 } 280 281 if (TARG != left) { /* not $l .= $r */ 282 STRLEN llen; 283 const char* const lpv = SvPV_nomg_const(left, llen); 284 lbyte = !DO_UTF8(left); 285 sv_setpvn(TARG, lpv, llen); 286 if (!lbyte) 287 SvUTF8_on(TARG); 288 else 289 SvUTF8_off(TARG); 290 } 291 else { /* $l .= $r and left == TARG */ 292 if (!SvOK(left)) { 293 if ((left == right /* $l .= $l */ 294 || targmy) /* $l = $l . $r */ 295 && ckWARN(WARN_UNINITIALIZED) 296 ) 297 report_uninit(left); 298 SvPVCLEAR(left); 299 } 300 else { 301 SvPV_force_nomg_nolen(left); 302 } 303 lbyte = !DO_UTF8(left); 304 if (IN_BYTES) 305 SvUTF8_off(left); 306 } 307 308 if (!rcopied) { 309 rpv = SvPV_nomg_const(right, rlen); 310 rbyte = !DO_UTF8(right); 311 } 312 if (lbyte != rbyte) { 313 if (lbyte) 314 sv_utf8_upgrade_nomg(TARG); 315 else { 316 if (!rcopied) 317 right = newSVpvn_flags(rpv, rlen, SVs_TEMP); 318 sv_utf8_upgrade_nomg(right); 319 rpv = SvPV_nomg_const(right, rlen); 320 } 321 } 322 sv_catpvn_nomg(TARG, rpv, rlen); 323 SvSETMAGIC(TARG); 324 } 325 326 327 PP(pp_concat) 328 { 329 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign); 330 { 331 dPOPTOPssrl; 332 S_do_concat(aTHX_ left, right, targ, PL_op->op_private & OPpTARGET_MY); 333 SETs(TARG); 334 RETURN; 335 } 336 } 337 338 339 /* pp_multiconcat() 340 341 Concatenate one or more args, possibly interleaved with constant string 342 segments. The result may be assigned to, or appended to, a variable or 343 expression. 344 345 Several op_flags and/or op_private bits indicate what the target is, and 346 whether it's appended to. Valid permutations are: 347 348 - (PADTMP) = (A.B.C....) 349 OPpTARGET_MY $lex = (A.B.C....) 350 OPpTARGET_MY,OPpLVAL_INTRO my $lex = (A.B.C....) 351 OPpTARGET_MY,OPpMULTICONCAT_APPEND $lex .= (A.B.C....) 352 OPf_STACKED expr = (A.B.C....) 353 OPf_STACKED,OPpMULTICONCAT_APPEND expr .= (A.B.C....) 354 355 Other combinations like (A.B).(C.D) are not optimised into a multiconcat 356 op, as it's too hard to get the correct ordering of ties, overload etc. 357 358 In addition: 359 360 OPpMULTICONCAT_FAKE: not a real concat, instead an optimised 361 sprintf "...%s...". Don't call '.' 362 overloading: only use '""' overloading. 363 364 OPpMULTICONCAT_STRINGIFY: the RHS was of the form 365 "...$a...$b..." rather than 366 "..." . $a . "..." . $b . "..." 367 368 An OP_MULTICONCAT is of type UNOP_AUX. 
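    For example (an illustrative sketch, not part of the original notes):

        my $lex = "a=$a b=$b\n";

    is normally compiled to a single OP_MULTICONCAT with OPpTARGET_MY,
    OPpLVAL_INTRO and OPpMULTICONCAT_STRINGIFY set, taking $a and $b as its
    two stack args.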
The fixed slots of the aux array are 369 defined with PERL_MULTICONCAT_IX_FOO constants, where: 370 371 372 FOO index description 373 -------- ----- ---------------------------------- 374 NARGS 0 number of arguments 375 PLAIN_PV 1 non-utf8 constant string 376 PLAIN_LEN 2 non-utf8 constant string length 377 UTF8_PV 3 utf8 constant string 378 UTF8_LEN 4 utf8 constant string length 379 LENGTHS 5 first of nargs+1 const segment lengths 380 381 The idea is that a general string concatenation will have a fixed (known 382 at compile time) number of variable args, interspersed with constant 383 strings, e.g. "a=$a b=$b\n" 384 385 All the constant string segments "a=", " b=" and "\n" are stored as a 386 single string "a= b=\n", pointed to from the PLAIN_PV/UTF8_PV slot, along 387 with a series of segment lengths: e.g. 2,3,1. In the case where the 388 constant string is plain but has a different utf8 representation, both 389 variants are stored, and two sets of (nargs+1) segments lengths are stored 390 in the slots beginning at PERL_MULTICONCAT_IX_LENGTHS. 391 392 A segment length of -1 indicates that there is no constant string at that 393 point; this distinguishes between e.g. ($a . $b) and ($a . "" . $b), which 394 have differing overloading behaviour. 395 396 */ 397 398 PP(pp_multiconcat) 399 { 400 dSP; 401 SV *targ; /* The SV to be assigned or appended to */ 402 char *targ_pv; /* where within SvPVX(targ) we're writing to */ 403 STRLEN targ_len; /* SvCUR(targ) */ 404 SV **toparg; /* the highest arg position on the stack */ 405 UNOP_AUX_item *aux; /* PL_op->op_aux buffer */ 406 UNOP_AUX_item *const_lens; /* the segment length array part of aux */ 407 const char *const_pv; /* the current segment of the const string buf */ 408 SSize_t nargs; /* how many args were expected */ 409 SSize_t stack_adj; /* how much to adjust SP on return */ 410 STRLEN grow; /* final size of destination string (targ) */ 411 UV targ_count; /* how many times targ has appeared on the RHS */ 412 bool is_append; /* OPpMULTICONCAT_APPEND flag is set */ 413 bool slow_concat; /* args too complex for quick concat */ 414 U32 dst_utf8; /* the result will be utf8 (indicate this with 415 SVf_UTF8 in a U32, rather than using bool, 416 for ease of testing and setting) */ 417 /* for each arg, holds the result of an SvPV() call */ 418 struct multiconcat_svpv { 419 char *pv; 420 SSize_t len; 421 } 422 *targ_chain, /* chain of slots where targ has appeared on RHS */ 423 *svpv_p, /* ptr for looping through svpv_buf */ 424 *svpv_base, /* first slot (may be greater than svpv_buf), */ 425 *svpv_end, /* and slot after highest result so far, of: */ 426 svpv_buf[PERL_MULTICONCAT_MAXARG]; /* buf for storing SvPV() results */ 427 428 aux = cUNOP_AUXx(PL_op)->op_aux; 429 stack_adj = nargs = aux[PERL_MULTICONCAT_IX_NARGS].ssize; 430 is_append = cBOOL(PL_op->op_private & OPpMULTICONCAT_APPEND); 431 432 /* get targ from the stack or pad */ 433 434 if (PL_op->op_flags & OPf_STACKED) { 435 if (is_append) { 436 /* for 'expr .= ...', expr is the bottom item on the stack */ 437 targ = SP[-nargs]; 438 stack_adj++; 439 } 440 else 441 /* for 'expr = ...', expr is the top item on the stack */ 442 targ = POPs; 443 } 444 else { 445 SV **svp = &(PAD_SVl(PL_op->op_targ)); 446 targ = *svp; 447 if (PL_op->op_private & OPpLVAL_INTRO) { 448 assert(PL_op->op_private & OPpTARGET_MY); 449 save_clearsv(svp); 450 } 451 if (!nargs) 452 /* $lex .= "const" doesn't cause anything to be pushed */ 453 EXTEND(SP,1); 454 } 455 456 toparg = SP; 457 SP -= (nargs - 1); 458 grow = 1; /* 
allow for '\0' at minimum */
    targ_count = 0;
    targ_chain = NULL;
    targ_len   = 0;
    svpv_end   = svpv_buf;
                    /* only utf8 variants of the const strings? */
    dst_utf8 = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv ? 0 : SVf_UTF8;


    /* --------------------------------------------------------------
     * Phase 1:
     *
     * stringify (i.e. SvPV()) every arg and store the resultant pv/len/utf8
     * triplets in svpv_buf[]. Also increment 'grow' by the args' lengths.
     *
     * utf8 is indicated by storing a negative length.
     *
     * Where an arg is actually targ, the stringification is deferred:
     * the length is set to 0, and the slot is added to targ_chain.
     *
     * If a magic, overloaded, or otherwise weird arg is found, which
     * might have side effects when stringified, the loop is abandoned and
     * we goto a code block where a more basic 'emulate calling
     * pp_concat() on each arg in turn' is done.
     */

    for (; SP <= toparg; SP++, svpv_end++) {
        U32 utf8;
        STRLEN len;
        SV *sv;

        assert(svpv_end - svpv_buf < PERL_MULTICONCAT_MAXARG);

        sv = *SP;

        /* this if/else chain is arranged so that common/simple cases
         * take few conditionals */

        if (LIKELY((SvFLAGS(sv) & (SVs_GMG|SVf_ROK|SVf_POK)) == SVf_POK)) {
            /* common case: sv is a simple non-magical PV */
            if (targ == sv) {
                /* targ appears on RHS.
                 * Delay storing PV pointer; instead, add slot to targ_chain
                 * so it can be populated later, after targ has been grown and
                 * we know its final SvPVX() address.
                 */
              targ_on_rhs:
                svpv_end->len = 0;   /* zeroing here means we can skip
                                        updating later if targ_len == 0 */
                svpv_end->pv  = (char*)targ_chain;
                targ_chain    = svpv_end;
                targ_count++;
                continue;
            }

            len          = SvCUR(sv);
            svpv_end->pv = SvPVX(sv);
        }
        else if (UNLIKELY(SvFLAGS(sv) & (SVs_GMG|SVf_ROK)))
            /* may have side effects: tie, overload etc.
             * Abandon 'stringify everything first' and handle
             * args in strict order. Note that already-stringified args
             * will be reprocessed, which is safe because each arg's first
             * stringification would have been idempotent.
             */
            goto do_magical;
        else if (SvNIOK(sv)) {
            if (targ == sv)
                goto targ_on_rhs;
            /* stringify general valid scalar */
            svpv_end->pv = sv_2pv_flags(sv, &len, 0);
        }
        else if (!SvOK(sv)) {
            if (ckWARN(WARN_UNINITIALIZED))
                /* an undef value in the presence of warnings may trigger
                 * side effects */
                goto do_magical;
            svpv_end->pv = (char*)"";
            len = 0;
        }
        else
            goto do_magical; /* something weird */

        utf8 = (SvFLAGS(sv) & SVf_UTF8);
        dst_utf8 |= utf8;
        ASSUME(len < SSize_t_MAX);
        svpv_end->len = utf8 ? -(SSize_t)len : (SSize_t)len;
        grow += len;
    }

    /* --------------------------------------------------------------
     * Phase 2:
     *
     * Stringify targ:
     *
     * if targ appears on the RHS or is appended to, force stringify it;
     * otherwise set it to "". Then set targ_len.
     */

    if (is_append) {
        /* abandon quick route if using targ might have side effects */
        if (UNLIKELY(SvFLAGS(targ) & (SVs_GMG|SVf_ROK)))
            goto do_magical;

        if (SvOK(targ)) {
            U32 targ_utf8;
          stringify_targ:
            SvPV_force_nomg_nolen(targ);
            targ_utf8 = SvFLAGS(targ) & SVf_UTF8;
            if (UNLIKELY(dst_utf8 & ~targ_utf8)) {
                if (LIKELY(!IN_BYTES))
                    sv_utf8_upgrade_nomg(targ);
            }
            else
                dst_utf8 |= targ_utf8;

            targ_len = SvCUR(targ);
            grow += targ_len * (targ_count + is_append);
            goto phase3;
        }
        else if (ckWARN(WARN_UNINITIALIZED))
            /* warning might have side effects */
            goto do_magical;
        /* the undef targ will be silently SvPVCLEAR()ed below */
    }
    else if (UNLIKELY(SvTYPE(targ) >= SVt_REGEXP)) {
        /* Assigning to some weird LHS type. Don't force the LHS to be an
         * empty string; instead, do things 'long hand' by using the
         * overload code path, which concats to a TEMP sv and does
         * sv_catsv() calls rather than COPY()s. This ensures that even
         * bizarre code like this doesn't break or crash:
         *    *F = *F . *F.
         * (which makes the 'F' typeglob an alias to the
         * '*main::F*main::F' typeglob).
         */
        goto do_magical;
    }
    else if (targ_chain)
        /* targ was found on RHS.
         * Force stringify it, using the same code as the append branch
         * above, except that we don't need the magic/overload/undef
         * checks as these will already have been done in the phase 1
         * loop.
         */
        goto stringify_targ;

    /* unrolled SvPVCLEAR() - mostly: no need to grow or set SvCUR() to 0;
     * those will be done later. */
    SV_CHECK_THINKFIRST_COW_DROP(targ);
    SvUPGRADE(targ, SVt_PV);
    SvFLAGS(targ) &= ~(SVf_OK|SVf_IVisUV|SVf_UTF8);
    SvFLAGS(targ) |= (SVf_POK|SVp_POK|dst_utf8);

  phase3:

    /* --------------------------------------------------------------
     * Phase 3:
     *
     * UTF-8 tweaks and grow targ:
     *
     * Now that we know the length and utf8-ness of both the targ and
     * args, grow targ to the size needed to accumulate all the args, based
     * on whether targ appears on the RHS, whether we're appending, and
     * whether any non-utf8 args expand in size if converted to utf8.
     *
     * For the latter, if dst_utf8 we scan non-utf8 args looking for
     * variant chars, and adjust the svpv->len value of those args to the
     * utf8 size and negate it to flag them. At the same time we un-negate
     * the lens of any utf8 args since after this phase we no longer care
     * whether an arg is utf8 or not.
     *
     * Finally, initialise const_lens and const_pv based on utf8ness.
     * Note that there are 3 permutations:
     *
     * * If the constant string is invariant whether utf8 or not (e.g. "abc"),
     *   then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] are the same as
     *   aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN] and there is one set of
     *   segment lengths.
     *
     * * If the string is fully utf8, e.g. "\x{100}", then
     *   aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] == (NULL,0) and there is
     *   one set of segment lengths.
     *
     * * If the string has different plain and utf8 representations
     *   (e.g. "\x80"), then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN]
     *   holds the plain rep, while aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN]
     *   holds the utf8 rep, and there are 2 sets of segment lengths,
     *   with the utf8 set following after the plain set.
     *
     * On entry to this section the (pv,len) pairs in svpv_buf have the
     * following meanings:
     *    (pv,  len) a plain string
     *    (pv, -len) a utf8 string
     *    (NULL,  0) left-most targ \ linked together R-to-L
     *    (next,  0) other targ     / in targ_chain
     */

    /* turn off utf8 handling if 'use bytes' is in scope */
    if (UNLIKELY(dst_utf8 && IN_BYTES)) {
        dst_utf8 = 0;
        SvUTF8_off(targ);
        /* undo all the negative lengths which flag utf8-ness */
        for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
            SSize_t len = svpv_p->len;
            if (len < 0)
                svpv_p->len = -len;
        }
    }

    /* grow += total of lengths of constant string segments */
    {
        SSize_t len;
        len = aux[dst_utf8 ? PERL_MULTICONCAT_IX_UTF8_LEN
                           : PERL_MULTICONCAT_IX_PLAIN_LEN].ssize;
        slow_concat = cBOOL(len);
        grow += len;
    }

    const_lens = aux + PERL_MULTICONCAT_IX_LENGTHS;

    if (dst_utf8) {
        const_pv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv;
        if (   aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv
            && const_pv != aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv)
            /* separate sets of lengths for plain and utf8 */
            const_lens += nargs + 1;

        /* If the result is utf8 but some of the args aren't,
         * calculate how much extra growth is needed for all the chars
         * which will expand to two utf8 bytes.
         * Also, if the growth is non-zero, negate the length to indicate
         * that this is a variant string. Conversely, un-negate the
         * length on utf8 args (which was only needed to flag non-utf8
         * args in this loop) */
        for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) {
            SSize_t len, extra;

            len = svpv_p->len;
            if (len <= 0) {
                svpv_p->len = -len;
                continue;
            }

            extra = variant_under_utf8_count((U8 *) svpv_p->pv,
                                             (U8 *) svpv_p->pv + len);
            if (UNLIKELY(extra)) {
                grow       += extra;
                              /* -ve len indicates special handling */
                svpv_p->len = -(len + extra);
                slow_concat = TRUE;
            }
        }
    }
    else
        const_pv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv;

    /* unrolled SvGROW(), except don't check for SVf_IsCOW, which should
     * already have been dropped */
    assert(!SvIsCOW(targ));
    targ_pv = (SvLEN(targ) < (grow) ? sv_grow(targ,grow) : SvPVX(targ));


    /* --------------------------------------------------------------
     * Phase 4:
     *
     * Now that targ has been grown, we know the final address of the targ
     * PVX, if needed. Preserve / move targ contents if appending or if
     * targ appears on RHS.
     *
     * Also update svpv_buf slots in targ_chain.
     *
     * Don't bother with any of this if the target length is zero:
     * targ_len is set to zero unless we're appending or targ appears on
     * RHS.  And even if it is, we can optimise by skipping this chunk of
     * code for zero targ_len. In the latter case, we don't need to update
     * the slots in targ_chain with the (zero length) target string, since
     * we set the len in such slots to 0 earlier, and since the Copy() is
     * skipped on zero length, it doesn't matter what svpv_p->pv contains.
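     *
     * (For example, and purely as an illustration: a plain assignment such
     * as '$x = "a" . $y' reaches this point with targ_len == 0, so the
     * whole block below is skipped.)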
736 * 737 * On entry to this section the (pv,len) pairs in svpv_buf have the 738 * following meanings: 739 * (pv, len) a pure-plain or utf8 string 740 * (pv, -(len+extra)) a plain string which will expand by 'extra' 741 * bytes when converted to utf8 742 * (NULL, 0) left-most targ \ linked together R-to-L 743 * (next, 0) other targ / in targ_chain 744 * 745 * On exit, the targ contents will have been moved to the 746 * earliest place they are needed (e.g. $x = "abc$x" will shift them 747 * 3 bytes, while $x .= ... will leave them at the beginning); 748 * and dst_pv will point to the location within SvPVX(targ) where the 749 * next arg should be copied. 750 */ 751 752 svpv_base = svpv_buf; 753 754 if (targ_len) { 755 struct multiconcat_svpv *tc_stop; 756 char *targ_buf = targ_pv; /* ptr to original targ string */ 757 758 assert(is_append || targ_count); 759 760 if (is_append) { 761 targ_pv += targ_len; 762 tc_stop = NULL; 763 } 764 else { 765 /* The targ appears on RHS, e.g. '$t = $a . $t . $t'. 766 * Move the current contents of targ to the first 767 * position where it's needed, and use that as the src buffer 768 * for any further uses (such as the second RHS $t above). 769 * In calculating the first position, we need to sum the 770 * lengths of all consts and args before that. 771 */ 772 773 UNOP_AUX_item *lens = const_lens; 774 /* length of first const string segment */ 775 STRLEN offset = lens->ssize > 0 ? lens->ssize : 0; 776 777 assert(targ_chain); 778 svpv_p = svpv_base; 779 780 for (;;) { 781 SSize_t len; 782 if (!svpv_p->pv) 783 break; /* the first targ argument */ 784 /* add lengths of the next arg and const string segment */ 785 len = svpv_p->len; 786 if (len < 0) /* variant args have this */ 787 len = -len; 788 offset += (STRLEN)len; 789 len = (++lens)->ssize; 790 offset += (len >= 0) ? (STRLEN)len : 0; 791 if (!offset) { 792 /* all args and consts so far are empty; update 793 * the start position for the concat later */ 794 svpv_base++; 795 const_lens++; 796 } 797 svpv_p++; 798 assert(svpv_p < svpv_end); 799 } 800 801 if (offset) { 802 targ_buf += offset; 803 Move(targ_pv, targ_buf, targ_len, char); 804 /* a negative length implies don't Copy(), but do increment */ 805 svpv_p->len = -((SSize_t)targ_len); 806 slow_concat = TRUE; 807 } 808 else { 809 /* skip the first targ copy */ 810 svpv_base++; 811 const_lens++; 812 targ_pv += targ_len; 813 } 814 815 /* Don't populate the first targ slot in the loop below; it's 816 * either not used because we advanced svpv_base beyond it, or 817 * we already stored the special -targ_len value in it 818 */ 819 tc_stop = svpv_p; 820 } 821 822 /* populate slots in svpv_buf representing targ on RHS */ 823 while (targ_chain != tc_stop) { 824 struct multiconcat_svpv *p = targ_chain; 825 targ_chain = (struct multiconcat_svpv *)(p->pv); 826 p->pv = targ_buf; 827 p->len = (SSize_t)targ_len; 828 } 829 } 830 831 832 /* -------------------------------------------------------------- 833 * Phase 5: 834 * 835 * Append all the args in svpv_buf, plus the const strings, to targ. 836 * 837 * On entry to this section the (pv,len) pairs in svpv_buf have the 838 * following meanings: 839 * (pv, len) a pure-plain or utf8 string (which may be targ) 840 * (pv, -(len+extra)) a plain string which will expand by 'extra' 841 * bytes when converted to utf8 842 * (0, -len) left-most targ, whose content has already 843 * been copied. Just advance targ_pv by len. 
844 */ 845 846 /* If there are no constant strings and no special case args 847 * (svpv_p->len < 0), use a simpler, more efficient concat loop 848 */ 849 if (!slow_concat) { 850 for (svpv_p = svpv_base; svpv_p < svpv_end; svpv_p++) { 851 SSize_t len = svpv_p->len; 852 if (!len) 853 continue; 854 Copy(svpv_p->pv, targ_pv, len, char); 855 targ_pv += len; 856 } 857 const_lens += (svpv_end - svpv_base + 1); 858 } 859 else { 860 /* Note that we iterate the loop nargs+1 times: to append nargs 861 * arguments and nargs+1 constant strings. For example, "-$a-$b-" 862 */ 863 svpv_p = svpv_base - 1; 864 865 for (;;) { 866 SSize_t len = (const_lens++)->ssize; 867 868 /* append next const string segment */ 869 if (len > 0) { 870 Copy(const_pv, targ_pv, len, char); 871 targ_pv += len; 872 const_pv += len; 873 } 874 875 if (++svpv_p == svpv_end) 876 break; 877 878 /* append next arg */ 879 len = svpv_p->len; 880 881 if (LIKELY(len > 0)) { 882 Copy(svpv_p->pv, targ_pv, len, char); 883 targ_pv += len; 884 } 885 else if (UNLIKELY(len < 0)) { 886 /* negative length indicates two special cases */ 887 const char *p = svpv_p->pv; 888 len = -len; 889 if (UNLIKELY(p)) { 890 /* copy plain-but-variant pv to a utf8 targ */ 891 char * end_pv = targ_pv + len; 892 assert(dst_utf8); 893 while (targ_pv < end_pv) { 894 U8 c = (U8) *p++; 895 append_utf8_from_native_byte(c, (U8**)&targ_pv); 896 } 897 } 898 else 899 /* arg is already-copied targ */ 900 targ_pv += len; 901 } 902 903 } 904 } 905 906 *targ_pv = '\0'; 907 SvCUR_set(targ, targ_pv - SvPVX(targ)); 908 assert(grow >= SvCUR(targ) + 1); 909 assert(SvLEN(targ) >= SvCUR(targ) + 1); 910 911 /* -------------------------------------------------------------- 912 * Phase 6: 913 * 914 * return result 915 */ 916 917 SP -= stack_adj; 918 SvTAINT(targ); 919 SETTARG; 920 RETURN; 921 922 /* -------------------------------------------------------------- 923 * Phase 7: 924 * 925 * We only get here if any of the args (or targ too in the case of 926 * append) have something which might cause side effects, such 927 * as magic, overload, or an undef value in the presence of warnings. 928 * In that case, any earlier attempt to stringify the args will have 929 * been abandoned, and we come here instead. 930 * 931 * Here, we concat each arg in turn the old-fashioned way: essentially 932 * emulating pp_concat() in a loop. This means that all the weird edge 933 * cases will be handled correctly, if not necessarily speedily. 934 * 935 * Note that some args may already have been stringified - those are 936 * processed again, which is safe, since only args without side-effects 937 * were stringified earlier. 
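     *
     * For example (an illustrative case, not from the original notes):
     *
     *     my $x = "a" . $obj . "b";
     *
     * where $obj is an object with '""' overloading: its SVf_ROK flag makes
     * the phase 1 loop above jump straight to this path.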
938 */ 939 940 do_magical: 941 { 942 SSize_t i, n; 943 SV *left = NULL; 944 SV *right; 945 SV* nexttarg; 946 bool nextappend; 947 U32 utf8 = 0; 948 SV **svp; 949 const char *cpv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv; 950 UNOP_AUX_item *lens = aux + PERL_MULTICONCAT_IX_LENGTHS; 951 Size_t arg_count = 0; /* how many args have been processed */ 952 953 if (!cpv) { 954 cpv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv; 955 utf8 = SVf_UTF8; 956 } 957 958 svp = toparg - nargs + 1; 959 960 /* iterate for: 961 * nargs arguments, 962 * plus possible nargs+1 consts, 963 * plus, if appending, a final targ in an extra last iteration 964 */ 965 966 n = nargs *2 + 1; 967 for (i = 0; i <= n; i++) { 968 SSize_t len; 969 970 /* if necessary, stringify the final RHS result in 971 * something like $targ .= "$a$b$c" - simulating 972 * pp_stringify 973 */ 974 if ( i == n 975 && (PL_op->op_private &OPpMULTICONCAT_STRINGIFY) 976 && !(SvPOK(left)) 977 /* extra conditions for backwards compatibility: 978 * probably incorrect, but keep the existing behaviour 979 * for now. The rules are: 980 * $x = "$ov" single arg: stringify; 981 * $x = "$ov$y" multiple args: don't stringify, 982 * $lex = "$ov$y$z" except TARGMY with at least 2 concats 983 */ 984 && ( arg_count == 1 985 || ( arg_count >= 3 986 && !is_append 987 && (PL_op->op_private & OPpTARGET_MY) 988 && !(PL_op->op_private & OPpLVAL_INTRO) 989 ) 990 ) 991 ) 992 { 993 SV *tmp = sv_newmortal(); 994 sv_copypv(tmp, left); 995 SvSETMAGIC(tmp); 996 left = tmp; 997 } 998 999 /* do one extra iteration to handle $targ in $targ .= ... */ 1000 if (i == n && !is_append) 1001 break; 1002 1003 /* get the next arg SV or regen the next const SV */ 1004 len = lens[i >> 1].ssize; 1005 if (i == n) { 1006 /* handle the final targ .= (....) */ 1007 right = left; 1008 left = targ; 1009 } 1010 else if (i & 1) 1011 right = svp[(i >> 1)]; 1012 else if (len < 0) 1013 continue; /* no const in this position */ 1014 else { 1015 right = newSVpvn_flags(cpv, len, (utf8 | SVs_TEMP)); 1016 cpv += len; 1017 } 1018 1019 arg_count++; 1020 1021 if (arg_count <= 1) { 1022 left = right; 1023 continue; /* need at least two SVs to concat together */ 1024 } 1025 1026 if (arg_count == 2 && i < n) { 1027 /* for the first concat, create a mortal acting like the 1028 * padtmp from OP_CONST. In later iterations this will 1029 * be appended to */ 1030 nexttarg = sv_newmortal(); 1031 nextappend = FALSE; 1032 } 1033 else { 1034 nexttarg = left; 1035 nextappend = TRUE; 1036 } 1037 1038 /* Handle possible overloading. 1039 * This is basically an unrolled 1040 * tryAMAGICbin_MG(concat_amg, AMGf_assign); 1041 * and 1042 * Perl_try_amagic_bin() 1043 * call, but using left and right rather than SP[-1], SP[0], 1044 * and not relying on OPf_STACKED implying .= 1045 */ 1046 1047 if ((SvFLAGS(left)|SvFLAGS(right)) & (SVf_ROK|SVs_GMG)) { 1048 SvGETMAGIC(left); 1049 if (left != right) 1050 SvGETMAGIC(right); 1051 1052 if ((SvAMAGIC(left) || SvAMAGIC(right)) 1053 /* sprintf doesn't do concat overloading, 1054 * but allow for $x .= sprintf(...) 1055 */ 1056 && ( !(PL_op->op_private & OPpMULTICONCAT_FAKE) 1057 || i == n) 1058 ) 1059 { 1060 SV * const tmpsv = amagic_call(left, right, concat_amg, 1061 (nextappend ? 
AMGf_assign: 0));
                if (tmpsv) {
                    /* NB: tryAMAGICbin_MG() includes an SvPADMY test
                     * here, which isn't needed, as any implicit
                     * assign done under OPpTARGET_MY happens after
                     * this loop */
                    if (nextappend) {
                        sv_setsv(left, tmpsv);
                        SvSETMAGIC(left);
                    }
                    else
                        left = tmpsv;
                    continue;
                }
            }

            /* if both args are the same magical value, make one a copy */
            if (left == right && SvGMAGICAL(left)) {
                left = sv_newmortal();
                /* Print the uninitialized warning now, so it includes the
                 * variable name. */
                if (!SvOK(right)) {
                    if (ckWARN(WARN_UNINITIALIZED))
                        report_uninit(right);
                    sv_setsv_flags(left, &PL_sv_no, 0);
                }
                else
                    sv_setsv_flags(left, right, 0);
                SvGETMAGIC(right);
            }
        }

        /* nexttarg = left . right */
        S_do_concat(aTHX_ left, right, nexttarg, 0);
        left = nexttarg;
    }

    SP = toparg - stack_adj + 1;

    /* Assign result of all RHS concats (left) to LHS (targ).
     * If we are appending, targ will already have been appended to in
     * the loop */
    if (is_append)
        SvTAINT(targ);
    else {
        sv_setsv(targ, left);
        SvSETMAGIC(targ);
    }
    SETs(targ);
    RETURN;
    }
}


/* push the elements of av onto the stack.
 * Returns PL_op->op_next to allow tail-call optimisation of its callers */

STATIC OP*
S_pushav(pTHX_ AV* const av)
{
    dSP;
    const SSize_t maxarg = AvFILL(av) + 1;
    EXTEND(SP, maxarg);
    if (UNLIKELY(SvRMAGICAL(av))) {
        PADOFFSET i;
        for (i=0; i < (PADOFFSET)maxarg; i++) {
            SV ** const svp = av_fetch(av, i, FALSE);
            SP[i+1] = LIKELY(svp)
                       ? *svp
                       : UNLIKELY(PL_op->op_flags & OPf_MOD)
                          ? av_nonelem(av,i)
                          : &PL_sv_undef;
        }
    }
    else {
        PADOFFSET i;
        for (i=0; i < (PADOFFSET)maxarg; i++) {
            SV *sv = AvARRAY(av)[i];
            SP[i+1] = LIKELY(sv)
                       ? sv
                       : UNLIKELY(PL_op->op_flags & OPf_MOD)
                          ? av_nonelem(av,i)
                          : &PL_sv_undef;
        }
    }
    SP += maxarg;
    PUTBACK;
    return NORMAL;
}


/* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */

PP(pp_padrange)
{
    dSP;
    PADOFFSET base = PL_op->op_targ;
    int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK;
    if (PL_op->op_flags & OPf_SPECIAL) {
        /* fake the RHS of my ($x,$y,..)
= @_ */ 1161 PUSHMARK(SP); 1162 (void)S_pushav(aTHX_ GvAVn(PL_defgv)); 1163 SPAGAIN; 1164 } 1165 1166 /* note, this is only skipped for compile-time-known void cxt */ 1167 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) { 1168 int i; 1169 1170 EXTEND(SP, count); 1171 PUSHMARK(SP); 1172 for (i = 0; i <count; i++) 1173 *++SP = PAD_SV(base+i); 1174 } 1175 if (PL_op->op_private & OPpLVAL_INTRO) { 1176 SV **svp = &(PAD_SVl(base)); 1177 const UV payload = (UV)( 1178 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT)) 1179 | (count << SAVE_TIGHT_SHIFT) 1180 | SAVEt_CLEARPADRANGE); 1181 int i; 1182 1183 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT)); 1184 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) 1185 == (Size_t)base); 1186 { 1187 dSS_ADD; 1188 SS_ADD_UV(payload); 1189 SS_ADD_END(1); 1190 } 1191 1192 for (i = 0; i <count; i++) 1193 SvPADSTALE_off(*svp++); /* mark lexical as active */ 1194 } 1195 RETURN; 1196 } 1197 1198 1199 PP(pp_padsv) 1200 { 1201 dSP; 1202 EXTEND(SP, 1); 1203 { 1204 OP * const op = PL_op; 1205 /* access PL_curpad once */ 1206 SV ** const padentry = &(PAD_SVl(op->op_targ)); 1207 { 1208 dTARG; 1209 TARG = *padentry; 1210 PUSHs(TARG); 1211 PUTBACK; /* no pop/push after this, TOPs ok */ 1212 } 1213 if (op->op_flags & OPf_MOD) { 1214 if (op->op_private & OPpLVAL_INTRO) 1215 if (!(op->op_private & OPpPAD_STATE)) 1216 save_clearsv(padentry); 1217 if (op->op_private & OPpDEREF) { 1218 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather 1219 than TARG reduces the scope of TARG, so it does not 1220 span the call to save_clearsv, resulting in smaller 1221 machine code. */ 1222 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF); 1223 } 1224 } 1225 return op->op_next; 1226 } 1227 } 1228 1229 PP(pp_readline) 1230 { 1231 dSP; 1232 /* pp_coreargs pushes a NULL to indicate no args passed to 1233 * CORE::readline() */ 1234 if (TOPs) { 1235 SvGETMAGIC(TOPs); 1236 tryAMAGICunTARGETlist(iter_amg, 0); 1237 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--); 1238 } 1239 else PL_last_in_gv = PL_argvgv, PL_stack_sp--; 1240 if (!isGV_with_GP(PL_last_in_gv)) { 1241 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv))) 1242 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv)); 1243 else { 1244 dSP; 1245 XPUSHs(MUTABLE_SV(PL_last_in_gv)); 1246 PUTBACK; 1247 Perl_pp_rv2gv(aTHX); 1248 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--); 1249 assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv)); 1250 } 1251 } 1252 return do_readline(); 1253 } 1254 1255 PP(pp_eq) 1256 { 1257 dSP; 1258 SV *left, *right; 1259 1260 tryAMAGICbin_MG(eq_amg, AMGf_set|AMGf_numeric); 1261 right = POPs; 1262 left = TOPs; 1263 SETs(boolSV( 1264 (SvIOK_notUV(left) && SvIOK_notUV(right)) 1265 ? 
(SvIVX(left) == SvIVX(right)) 1266 : ( do_ncmp(left, right) == 0) 1267 )); 1268 RETURN; 1269 } 1270 1271 1272 /* also used for: pp_i_preinc() */ 1273 1274 PP(pp_preinc) 1275 { 1276 SV *sv = *PL_stack_sp; 1277 1278 if (LIKELY(((sv->sv_flags & 1279 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV| 1280 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK)) 1281 == SVf_IOK)) 1282 && SvIVX(sv) != IV_MAX) 1283 { 1284 SvIV_set(sv, SvIVX(sv) + 1); 1285 } 1286 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */ 1287 sv_inc(sv); 1288 SvSETMAGIC(sv); 1289 return NORMAL; 1290 } 1291 1292 1293 /* also used for: pp_i_predec() */ 1294 1295 PP(pp_predec) 1296 { 1297 SV *sv = *PL_stack_sp; 1298 1299 if (LIKELY(((sv->sv_flags & 1300 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV| 1301 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK)) 1302 == SVf_IOK)) 1303 && SvIVX(sv) != IV_MIN) 1304 { 1305 SvIV_set(sv, SvIVX(sv) - 1); 1306 } 1307 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */ 1308 sv_dec(sv); 1309 SvSETMAGIC(sv); 1310 return NORMAL; 1311 } 1312 1313 1314 /* also used for: pp_orassign() */ 1315 1316 PP(pp_or) 1317 { 1318 dSP; 1319 SV *sv; 1320 PERL_ASYNC_CHECK(); 1321 sv = TOPs; 1322 if (SvTRUE_NN(sv)) 1323 RETURN; 1324 else { 1325 if (PL_op->op_type == OP_OR) 1326 --SP; 1327 RETURNOP(cLOGOP->op_other); 1328 } 1329 } 1330 1331 1332 /* also used for: pp_dor() pp_dorassign() */ 1333 1334 PP(pp_defined) 1335 { 1336 dSP; 1337 SV* sv; 1338 bool defined; 1339 const int op_type = PL_op->op_type; 1340 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN); 1341 1342 if (is_dor) { 1343 PERL_ASYNC_CHECK(); 1344 sv = TOPs; 1345 if (UNLIKELY(!sv || !SvANY(sv))) { 1346 if (op_type == OP_DOR) 1347 --SP; 1348 RETURNOP(cLOGOP->op_other); 1349 } 1350 } 1351 else { 1352 /* OP_DEFINED */ 1353 sv = POPs; 1354 if (UNLIKELY(!sv || !SvANY(sv))) 1355 RETPUSHNO; 1356 } 1357 1358 defined = FALSE; 1359 switch (SvTYPE(sv)) { 1360 case SVt_PVAV: 1361 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied))) 1362 defined = TRUE; 1363 break; 1364 case SVt_PVHV: 1365 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied))) 1366 defined = TRUE; 1367 break; 1368 case SVt_PVCV: 1369 if (CvROOT(sv) || CvXSUB(sv)) 1370 defined = TRUE; 1371 break; 1372 default: 1373 SvGETMAGIC(sv); 1374 if (SvOK(sv)) 1375 defined = TRUE; 1376 break; 1377 } 1378 1379 if (is_dor) { 1380 if(defined) 1381 RETURN; 1382 if(op_type == OP_DOR) 1383 --SP; 1384 RETURNOP(cLOGOP->op_other); 1385 } 1386 /* assuming OP_DEFINED */ 1387 if(defined) 1388 RETPUSHYES; 1389 RETPUSHNO; 1390 } 1391 1392 1393 1394 PP(pp_add) 1395 { 1396 dSP; dATARGET; bool useleft; SV *svl, *svr; 1397 1398 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric); 1399 svr = TOPs; 1400 svl = TOPm1s; 1401 1402 #ifdef PERL_PRESERVE_IVUV 1403 1404 /* special-case some simple common cases */ 1405 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) { 1406 IV il, ir; 1407 U32 flags = (svl->sv_flags & svr->sv_flags); 1408 if (flags & SVf_IOK) { 1409 /* both args are simple IVs */ 1410 UV topl, topr; 1411 il = SvIVX(svl); 1412 ir = SvIVX(svr); 1413 do_iv: 1414 topl = ((UV)il) >> (UVSIZE * 8 - 2); 1415 topr = ((UV)ir) >> (UVSIZE * 8 - 2); 1416 1417 /* if both are in a range that can't under/overflow, do a 1418 * simple integer add: if the top of both numbers 1419 * are 00 or 11, then it's safe */ 1420 if (!( ((topl+1) | (topr+1)) & 2)) { 1421 SP--; 1422 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */ 1423 SETs(TARG); 
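            /* (Illustrative note, not in the original: the
             * '((topl+1) | (topr+1)) & 2' test above passes only when the
             * top two bits of each operand are 00 or 11, i.e. when both
             * operands lie within half the IV range, so their IV sum
             * cannot overflow.) */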
1424 RETURN; 1425 } 1426 goto generic; 1427 } 1428 else if (flags & SVf_NOK) { 1429 /* both args are NVs */ 1430 NV nl = SvNVX(svl); 1431 NV nr = SvNVX(svr); 1432 1433 if ( 1434 #if defined(NAN_COMPARE_BROKEN) && defined(Perl_isnan) 1435 !Perl_isnan(nl) && nl == (NV)(il = (IV)nl) 1436 && !Perl_isnan(nr) && nr == (NV)(ir = (IV)nr) 1437 #else 1438 nl == (NV)(il = (IV)nl) && nr == (NV)(ir = (IV)nr) 1439 #endif 1440 ) 1441 /* nothing was lost by converting to IVs */ 1442 goto do_iv; 1443 SP--; 1444 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */ 1445 SETs(TARG); 1446 RETURN; 1447 } 1448 } 1449 1450 generic: 1451 1452 useleft = USE_LEFT(svl); 1453 /* We must see if we can perform the addition with integers if possible, 1454 as the integer code detects overflow while the NV code doesn't. 1455 If either argument hasn't had a numeric conversion yet attempt to get 1456 the IV. It's important to do this now, rather than just assuming that 1457 it's not IOK as a PV of "9223372036854775806" may not take well to NV 1458 addition, and an SV which is NOK, NV=6.0 ought to be coerced to 1459 integer in case the second argument is IV=9223372036854775806 1460 We can (now) rely on sv_2iv to do the right thing, only setting the 1461 public IOK flag if the value in the NV (or PV) slot is truly integer. 1462 1463 A side effect is that this also aggressively prefers integer maths over 1464 fp maths for integer values. 1465 1466 How to detect overflow? 1467 1468 C 99 section 6.2.6.1 says 1469 1470 The range of nonnegative values of a signed integer type is a subrange 1471 of the corresponding unsigned integer type, and the representation of 1472 the same value in each type is the same. A computation involving 1473 unsigned operands can never overflow, because a result that cannot be 1474 represented by the resulting unsigned integer type is reduced modulo 1475 the number that is one greater than the largest value that can be 1476 represented by the resulting type. 1477 1478 (the 9th paragraph) 1479 1480 which I read as "unsigned ints wrap." 1481 1482 signed integer overflow seems to be classed as "exception condition" 1483 1484 If an exceptional condition occurs during the evaluation of an 1485 expression (that is, if the result is not mathematically defined or not 1486 in the range of representable values for its type), the behavior is 1487 undefined. 1488 1489 (6.5, the 5th paragraph) 1490 1491 I had assumed that on 2s complement machines signed arithmetic would 1492 wrap, hence coded pp_add and pp_subtract on the assumption that 1493 everything perl builds on would be happy. After much wailing and 1494 gnashing of teeth it would seem that irix64 knows its ANSI spec well, 1495 knows that it doesn't need to, and doesn't. Bah. Anyway, the all- 1496 unsigned code below is actually shorter than the old code. :-) 1497 */ 1498 1499 if (SvIV_please_nomg(svr)) { 1500 /* Unless the left argument is integer in range we are going to have to 1501 use NV maths. Hence only attempt to coerce the right argument if 1502 we know the left is integer. */ 1503 UV auv = 0; 1504 bool auvok = FALSE; 1505 bool a_valid = 0; 1506 1507 if (!useleft) { 1508 auv = 0; 1509 a_valid = auvok = 1; 1510 /* left operand is undef, treat as zero. + 0 is identity, 1511 Could SETi or SETu right now, but space optimise by not adding 1512 lots of code to speed up what is probably a rarish case. */ 1513 } else { 1514 /* Left operand is defined, so is it IV? 
*/ 1515 if (SvIV_please_nomg(svl)) { 1516 if ((auvok = SvUOK(svl))) 1517 auv = SvUVX(svl); 1518 else { 1519 const IV aiv = SvIVX(svl); 1520 if (aiv >= 0) { 1521 auv = aiv; 1522 auvok = 1; /* Now acting as a sign flag. */ 1523 } else { 1524 auv = (aiv == IV_MIN) ? (UV)aiv : (UV)(-aiv); 1525 } 1526 } 1527 a_valid = 1; 1528 } 1529 } 1530 if (a_valid) { 1531 bool result_good = 0; 1532 UV result; 1533 UV buv; 1534 bool buvok = SvUOK(svr); 1535 1536 if (buvok) 1537 buv = SvUVX(svr); 1538 else { 1539 const IV biv = SvIVX(svr); 1540 if (biv >= 0) { 1541 buv = biv; 1542 buvok = 1; 1543 } else 1544 buv = (biv == IV_MIN) ? (UV)biv : (UV)(-biv); 1545 } 1546 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve, 1547 else "IV" now, independent of how it came in. 1548 if a, b represents positive, A, B negative, a maps to -A etc 1549 a + b => (a + b) 1550 A + b => -(a - b) 1551 a + B => (a - b) 1552 A + B => -(a + b) 1553 all UV maths. negate result if A negative. 1554 add if signs same, subtract if signs differ. */ 1555 1556 if (auvok ^ buvok) { 1557 /* Signs differ. */ 1558 if (auv >= buv) { 1559 result = auv - buv; 1560 /* Must get smaller */ 1561 if (result <= auv) 1562 result_good = 1; 1563 } else { 1564 result = buv - auv; 1565 if (result <= buv) { 1566 /* result really should be -(auv-buv). as its negation 1567 of true value, need to swap our result flag */ 1568 auvok = !auvok; 1569 result_good = 1; 1570 } 1571 } 1572 } else { 1573 /* Signs same */ 1574 result = auv + buv; 1575 if (result >= auv) 1576 result_good = 1; 1577 } 1578 if (result_good) { 1579 SP--; 1580 if (auvok) 1581 SETu( result ); 1582 else { 1583 /* Negate result */ 1584 if (result <= (UV)IV_MIN) 1585 SETi(result == (UV)IV_MIN 1586 ? IV_MIN : -(IV)result); 1587 else { 1588 /* result valid, but out of range for IV. */ 1589 SETn( -(NV)result ); 1590 } 1591 } 1592 RETURN; 1593 } /* Overflow, drop through to NVs. */ 1594 } 1595 } 1596 1597 #else 1598 useleft = USE_LEFT(svl); 1599 #endif 1600 1601 { 1602 NV value = SvNV_nomg(svr); 1603 (void)POPs; 1604 if (!useleft) { 1605 /* left operand is undef, treat as zero. + 0.0 is identity. */ 1606 SETn(value); 1607 RETURN; 1608 } 1609 SETn( value + SvNV_nomg(svl) ); 1610 RETURN; 1611 } 1612 } 1613 1614 1615 /* also used for: pp_aelemfast_lex() */ 1616 1617 PP(pp_aelemfast) 1618 { 1619 dSP; 1620 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX 1621 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv); 1622 const U32 lval = PL_op->op_flags & OPf_MOD; 1623 const I8 key = (I8)PL_op->op_private; 1624 SV** svp; 1625 SV *sv; 1626 1627 assert(SvTYPE(av) == SVt_PVAV); 1628 1629 EXTEND(SP, 1); 1630 1631 /* inlined av_fetch() for simple cases ... */ 1632 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) { 1633 sv = AvARRAY(av)[key]; 1634 if (sv) { 1635 PUSHs(sv); 1636 RETURN; 1637 } 1638 } 1639 1640 /* ... else do it the hard way */ 1641 svp = av_fetch(av, key, lval); 1642 sv = (svp ? *svp : &PL_sv_undef); 1643 1644 if (UNLIKELY(!svp && lval)) 1645 DIE(aTHX_ PL_no_aelem, (int)key); 1646 1647 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */ 1648 mg_get(sv); 1649 PUSHs(sv); 1650 RETURN; 1651 } 1652 1653 PP(pp_join) 1654 { 1655 dSP; dMARK; dTARGET; 1656 MARK++; 1657 do_join(TARG, *MARK, MARK, SP); 1658 SP = MARK; 1659 SETs(TARG); 1660 RETURN; 1661 } 1662 1663 /* Oversized hot code. 
*/ 1664 1665 /* also used for: pp_say() */ 1666 1667 PP(pp_print) 1668 { 1669 dSP; dMARK; dORIGMARK; 1670 PerlIO *fp; 1671 MAGIC *mg; 1672 GV * const gv 1673 = (PL_op->op_flags & OPf_STACKED) ? MUTABLE_GV(*++MARK) : PL_defoutgv; 1674 IO *io = GvIO(gv); 1675 1676 if (io 1677 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar))) 1678 { 1679 had_magic: 1680 if (MARK == ORIGMARK) { 1681 /* If using default handle then we need to make space to 1682 * pass object as 1st arg, so move other args up ... 1683 */ 1684 MEXTEND(SP, 1); 1685 ++MARK; 1686 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*); 1687 ++SP; 1688 } 1689 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io), 1690 mg, 1691 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK 1692 | (PL_op->op_type == OP_SAY 1693 ? TIED_METHOD_SAY : 0)), sp - mark); 1694 } 1695 if (!io) { 1696 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv))) 1697 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar))) 1698 goto had_magic; 1699 report_evil_fh(gv); 1700 SETERRNO(EBADF,RMS_IFI); 1701 goto just_say_no; 1702 } 1703 else if (!(fp = IoOFP(io))) { 1704 if (IoIFP(io)) 1705 report_wrongway_fh(gv, '<'); 1706 else 1707 report_evil_fh(gv); 1708 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI); 1709 goto just_say_no; 1710 } 1711 else { 1712 SV * const ofs = GvSV(PL_ofsgv); /* $, */ 1713 MARK++; 1714 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) { 1715 while (MARK <= SP) { 1716 if (!do_print(*MARK, fp)) 1717 break; 1718 MARK++; 1719 if (MARK <= SP) { 1720 /* don't use 'ofs' here - it may be invalidated by magic callbacks */ 1721 if (!do_print(GvSV(PL_ofsgv), fp)) { 1722 MARK--; 1723 break; 1724 } 1725 } 1726 } 1727 } 1728 else { 1729 while (MARK <= SP) { 1730 if (!do_print(*MARK, fp)) 1731 break; 1732 MARK++; 1733 } 1734 } 1735 if (MARK <= SP) 1736 goto just_say_no; 1737 else { 1738 if (PL_op->op_type == OP_SAY) { 1739 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp)) 1740 goto just_say_no; 1741 } 1742 else if (PL_ors_sv && SvOK(PL_ors_sv)) 1743 if (!do_print(PL_ors_sv, fp)) /* $\ */ 1744 goto just_say_no; 1745 1746 if (IoFLAGS(io) & IOf_FLUSH) 1747 if (PerlIO_flush(fp) == EOF) 1748 goto just_say_no; 1749 } 1750 } 1751 SP = ORIGMARK; 1752 XPUSHs(&PL_sv_yes); 1753 RETURN; 1754 1755 just_say_no: 1756 SP = ORIGMARK; 1757 XPUSHs(&PL_sv_undef); 1758 RETURN; 1759 } 1760 1761 1762 /* do the common parts of pp_padhv() and pp_rv2hv() 1763 * It assumes the caller has done EXTEND(SP, 1) or equivalent. 1764 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set. 
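 *   (E.g., as an illustration: in 'if (keys %h) { ... }' the OP_KEYS op is
 *   normally optimised away, and the padhv/rv2hv op, with the ISKEYS flag
 *   set, resets the hash iterator and does the 'keys' work itself.)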
1765 * 'has_targ' indicates that the op has a target - this should 1766 * be a compile-time constant so that the code can constant-folded as 1767 * appropriate 1768 * */ 1769 1770 PERL_STATIC_INLINE OP* 1771 S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ) 1772 { 1773 bool is_tied; 1774 bool is_bool; 1775 MAGIC *mg; 1776 dSP; 1777 IV i; 1778 SV *sv; 1779 1780 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV); 1781 1782 if (gimme == G_ARRAY) { 1783 hv_pushkv(hv, 3); 1784 return NORMAL; 1785 } 1786 1787 if (is_keys) 1788 /* 'keys %h' masquerading as '%h': reset iterator */ 1789 (void)hv_iterinit(hv); 1790 1791 if (gimme == G_VOID) 1792 return NORMAL; 1793 1794 is_bool = ( PL_op->op_private & OPpTRUEBOOL 1795 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL 1796 && block_gimme() == G_VOID)); 1797 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied)); 1798 1799 if (UNLIKELY(is_tied)) { 1800 if (is_keys && !is_bool) { 1801 i = 0; 1802 while (hv_iternext(hv)) 1803 i++; 1804 goto push_i; 1805 } 1806 else { 1807 sv = magic_scalarpack(hv, mg); 1808 goto push_sv; 1809 } 1810 } 1811 else { 1812 i = HvUSEDKEYS(hv); 1813 if (is_bool) { 1814 sv = i ? &PL_sv_yes : &PL_sv_zero; 1815 push_sv: 1816 PUSHs(sv); 1817 } 1818 else { 1819 push_i: 1820 if (has_targ) { 1821 dTARGET; 1822 PUSHi(i); 1823 } 1824 else 1825 #ifdef PERL_OP_PARENT 1826 if (is_keys) { 1827 /* parent op should be an unused OP_KEYS whose targ we can 1828 * use */ 1829 dTARG; 1830 OP *k; 1831 1832 assert(!OpHAS_SIBLING(PL_op)); 1833 k = PL_op->op_sibparent; 1834 assert(k->op_type == OP_KEYS); 1835 TARG = PAD_SV(k->op_targ); 1836 PUSHi(i); 1837 } 1838 else 1839 #endif 1840 mPUSHi(i); 1841 } 1842 } 1843 1844 PUTBACK; 1845 return NORMAL; 1846 } 1847 1848 1849 /* This is also called directly by pp_lvavref. 
*/ 1850 PP(pp_padav) 1851 { 1852 dSP; dTARGET; 1853 U8 gimme; 1854 assert(SvTYPE(TARG) == SVt_PVAV); 1855 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO )) 1856 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) )) 1857 SAVECLEARSV(PAD_SVl(PL_op->op_targ)); 1858 EXTEND(SP, 1); 1859 1860 if (PL_op->op_flags & OPf_REF) { 1861 PUSHs(TARG); 1862 RETURN; 1863 } 1864 else if (PL_op->op_private & OPpMAYBE_LVSUB) { 1865 const I32 flags = is_lvalue_sub(); 1866 if (flags && !(flags & OPpENTERSUB_INARGS)) { 1867 if (GIMME_V == G_SCALAR) 1868 /* diag_listed_as: Can't return %s to lvalue scalar context */ 1869 Perl_croak(aTHX_ "Can't return array to lvalue scalar context"); 1870 PUSHs(TARG); 1871 RETURN; 1872 } 1873 } 1874 1875 gimme = GIMME_V; 1876 if (gimme == G_ARRAY) 1877 return S_pushav(aTHX_ (AV*)TARG); 1878 1879 if (gimme == G_SCALAR) { 1880 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1; 1881 if (!maxarg) 1882 PUSHs(&PL_sv_zero); 1883 else if (PL_op->op_private & OPpTRUEBOOL) 1884 PUSHs(&PL_sv_yes); 1885 else 1886 mPUSHi(maxarg); 1887 } 1888 RETURN; 1889 } 1890 1891 1892 PP(pp_padhv) 1893 { 1894 dSP; dTARGET; 1895 U8 gimme; 1896 1897 assert(SvTYPE(TARG) == SVt_PVHV); 1898 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO )) 1899 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) )) 1900 SAVECLEARSV(PAD_SVl(PL_op->op_targ)); 1901 1902 EXTEND(SP, 1); 1903 1904 if (PL_op->op_flags & OPf_REF) { 1905 PUSHs(TARG); 1906 RETURN; 1907 } 1908 else if (PL_op->op_private & OPpMAYBE_LVSUB) { 1909 const I32 flags = is_lvalue_sub(); 1910 if (flags && !(flags & OPpENTERSUB_INARGS)) { 1911 if (GIMME_V == G_SCALAR) 1912 /* diag_listed_as: Can't return %s to lvalue scalar context */ 1913 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context"); 1914 PUSHs(TARG); 1915 RETURN; 1916 } 1917 } 1918 1919 gimme = GIMME_V; 1920 1921 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme, 1922 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS), 1923 0 /* has_targ*/); 1924 } 1925 1926 1927 /* also used for: pp_rv2hv() */ 1928 /* also called directly by pp_lvavref */ 1929 1930 PP(pp_rv2av) 1931 { 1932 dSP; dTOPss; 1933 const U8 gimme = GIMME_V; 1934 static const char an_array[] = "an ARRAY"; 1935 static const char a_hash[] = "a HASH"; 1936 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV 1937 || PL_op->op_type == OP_LVAVREF; 1938 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV; 1939 1940 SvGETMAGIC(sv); 1941 if (SvROK(sv)) { 1942 if (UNLIKELY(SvAMAGIC(sv))) { 1943 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg); 1944 } 1945 sv = SvRV(sv); 1946 if (UNLIKELY(SvTYPE(sv) != type)) 1947 /* diag_listed_as: Not an ARRAY reference */ 1948 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash); 1949 else if (UNLIKELY(PL_op->op_flags & OPf_MOD 1950 && PL_op->op_private & OPpLVAL_INTRO)) 1951 Perl_croak(aTHX_ "%s", PL_no_localize_ref); 1952 } 1953 else if (UNLIKELY(SvTYPE(sv) != type)) { 1954 GV *gv; 1955 1956 if (!isGV_with_GP(sv)) { 1957 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash, 1958 type, &sp); 1959 if (!gv) 1960 RETURN; 1961 } 1962 else { 1963 gv = MUTABLE_GV(sv); 1964 } 1965 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv)); 1966 if (PL_op->op_private & OPpLVAL_INTRO) 1967 sv = is_pp_rv2av ? 
MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv)); 1968 } 1969 if (PL_op->op_flags & OPf_REF) { 1970 SETs(sv); 1971 RETURN; 1972 } 1973 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) { 1974 const I32 flags = is_lvalue_sub(); 1975 if (flags && !(flags & OPpENTERSUB_INARGS)) { 1976 if (gimme != G_ARRAY) 1977 goto croak_cant_return; 1978 SETs(sv); 1979 RETURN; 1980 } 1981 } 1982 1983 if (is_pp_rv2av) { 1984 AV *const av = MUTABLE_AV(sv); 1985 1986 if (gimme == G_ARRAY) { 1987 SP--; 1988 PUTBACK; 1989 return S_pushav(aTHX_ av); 1990 } 1991 1992 if (gimme == G_SCALAR) { 1993 const SSize_t maxarg = AvFILL(av) + 1; 1994 if (PL_op->op_private & OPpTRUEBOOL) 1995 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero); 1996 else { 1997 dTARGET; 1998 SETi(maxarg); 1999 } 2000 } 2001 } 2002 else { 2003 SP--; PUTBACK; 2004 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme, 2005 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS), 2006 1 /* has_targ*/); 2007 } 2008 RETURN; 2009 2010 croak_cant_return: 2011 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context", 2012 is_pp_rv2av ? "array" : "hash"); 2013 RETURN; 2014 } 2015 2016 STATIC void 2017 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey) 2018 { 2019 PERL_ARGS_ASSERT_DO_ODDBALL; 2020 2021 if (*oddkey) { 2022 if (ckWARN(WARN_MISC)) { 2023 const char *err; 2024 if (oddkey == firstkey && 2025 SvROK(*oddkey) && 2026 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV || 2027 SvTYPE(SvRV(*oddkey)) == SVt_PVHV)) 2028 { 2029 err = "Reference found where even-sized list expected"; 2030 } 2031 else 2032 err = "Odd number of elements in hash assignment"; 2033 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err); 2034 } 2035 2036 } 2037 } 2038 2039 2040 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which 2041 * are common to both the LHS and RHS of an aassign, and replace them 2042 * with copies. All these copies are made before the actual list assign is 2043 * done. 2044 * 2045 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS 2046 * element ($b) to the first LH element ($a), modifies $a; when the 2047 * second assignment is done, the second RH element now has the wrong 2048 * value. So we initially replace the RHS with ($b, mortalcopy($a)). 2049 * Note that we don't need to make a mortal copy of $b. 2050 * 2051 * The algorithm below works by, for every RHS element, mark the 2052 * corresponding LHS target element with SVf_BREAK. Then if the RHS 2053 * element is found with SVf_BREAK set, it means it would have been 2054 * modified, so make a copy. 2055 * Note that by scanning both LHS and RHS in lockstep, we avoid 2056 * unnecessary copies (like $b above) compared with a naive 2057 * "mark all LHS; copy all marked RHS; unmark all LHS". 2058 * 2059 * If the LHS element is a 'my' declaration' and has a refcount of 1, then 2060 * it can't be common and can be skipped. 2061 * 2062 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means 2063 * that we thought we didn't need to call S_aassign_copy_common(), but we 2064 * have anyway for sanity checking. If we find we need to copy, then panic. 2065 */ 2066 2067 PERL_STATIC_INLINE void 2068 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem, 2069 SV **firstrelem, SV **lastrelem 2070 #ifdef DEBUGGING 2071 , bool fake 2072 #endif 2073 ) 2074 { 2075 dVAR; 2076 SV **relem; 2077 SV **lelem; 2078 SSize_t lcount = lastlelem - firstlelem + 1; 2079 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? 
*/ 2080 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1); 2081 bool copy_all = FALSE; 2082 2083 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */ 2084 assert(firstlelem < lastlelem); /* at least 2 LH elements */ 2085 assert(firstrelem < lastrelem); /* at least 2 RH elements */ 2086 2087 2088 lelem = firstlelem; 2089 /* we never have to copy the first RH element; it can't be corrupted 2090 * by assigning something to the corresponding first LH element. 2091 * So this scan does in a loop: mark LHS[N]; test RHS[N+1] 2092 */ 2093 relem = firstrelem + 1; 2094 2095 for (; relem <= lastrelem; relem++) { 2096 SV *svr; 2097 2098 /* mark next LH element */ 2099 2100 if (--lcount >= 0) { 2101 SV *svl = *lelem++; 2102 2103 if (UNLIKELY(!svl)) {/* skip AV alias marker */ 2104 assert (lelem <= lastlelem); 2105 svl = *lelem++; 2106 lcount--; 2107 } 2108 2109 assert(svl); 2110 if (SvSMAGICAL(svl)) { 2111 copy_all = TRUE; 2112 } 2113 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) { 2114 if (!marked) 2115 return; 2116 /* this LH element will consume all further args; 2117 * no need to mark any further LH elements (if any). 2118 * But we still need to scan any remaining RHS elements; 2119 * set lcount negative to distinguish from lcount == 0, 2120 * so the loop condition continues being true 2121 */ 2122 lcount = -1; 2123 lelem--; /* no need to unmark this element */ 2124 } 2125 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) { 2126 SvFLAGS(svl) |= SVf_BREAK; 2127 marked = TRUE; 2128 } 2129 else if (!marked) { 2130 /* don't check RH element if no SVf_BREAK flags set yet */ 2131 if (!lcount) 2132 break; 2133 continue; 2134 } 2135 } 2136 2137 /* see if corresponding RH element needs copying */ 2138 2139 assert(marked); 2140 svr = *relem; 2141 assert(svr); 2142 2143 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) { 2144 U32 brk = (SvFLAGS(svr) & SVf_BREAK); 2145 2146 #ifdef DEBUGGING 2147 if (fake) { 2148 /* op_dump(PL_op); */ 2149 Perl_croak(aTHX_ 2150 "panic: aassign skipped needed copy of common RH elem %" 2151 UVuf, (UV)(relem - firstrelem)); 2152 } 2153 #endif 2154 2155 TAINT_NOT; /* Each item is independent */ 2156 2157 /* Dear TODO test in t/op/sort.t, I love you. 2158 (It's relying on a panic, not a "semi-panic" from newSVsv() 2159 and then an assertion failure below.) */ 2160 if (UNLIKELY(SvIS_FREED(svr))) { 2161 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p", 2162 (void*)svr); 2163 } 2164 /* avoid break flag while copying; otherwise COW etc 2165 * disabled... */ 2166 SvFLAGS(svr) &= ~SVf_BREAK; 2167 /* Not newSVsv(), as it does not allow copy-on-write, 2168 resulting in wasteful copies. 2169 Also, we use SV_NOSTEAL in case the SV is used more than 2170 once, e.g. (...) = (f())[0,0] 2171 Where the same SV appears twice on the RHS without a ref 2172 count bump. (Although I suspect that the SV won't be 2173 stealable here anyway - DAPM). 2174 */ 2175 *relem = sv_mortalcopy_flags(svr, 2176 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL); 2177 /* ... but restore afterwards in case it's needed again, 2178 * e.g. 
($a,$b,$c) = (1,$a,$a) 2179 */ 2180 SvFLAGS(svr) |= brk; 2181 } 2182 2183 if (!lcount) 2184 break; 2185 } 2186 2187 if (!marked) 2188 return; 2189 2190 /*unmark LHS */ 2191 2192 while (lelem > firstlelem) { 2193 SV * const svl = *(--lelem); 2194 if (svl) 2195 SvFLAGS(svl) &= ~SVf_BREAK; 2196 } 2197 } 2198 2199 2200 2201 PP(pp_aassign) 2202 { 2203 dVAR; dSP; 2204 SV **lastlelem = PL_stack_sp; 2205 SV **lastrelem = PL_stack_base + POPMARK; 2206 SV **firstrelem = PL_stack_base + POPMARK + 1; 2207 SV **firstlelem = lastrelem + 1; 2208 2209 SV **relem; 2210 SV **lelem; 2211 U8 gimme; 2212 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we 2213 * only need to save locally, not on the save stack */ 2214 U16 old_delaymagic = PL_delaymagic; 2215 #ifdef DEBUGGING 2216 bool fake = 0; 2217 #endif 2218 2219 PL_delaymagic = DM_DELAY; /* catch simultaneous items */ 2220 2221 /* If there's a common identifier on both sides we have to take 2222 * special care that assigning the identifier on the left doesn't 2223 * clobber a value on the right that's used later in the list. 2224 */ 2225 2226 /* at least 2 LH and RH elements, or commonality isn't an issue */ 2227 if (firstlelem < lastlelem && firstrelem < lastrelem) { 2228 for (relem = firstrelem+1; relem <= lastrelem; relem++) { 2229 if (SvGMAGICAL(*relem)) 2230 goto do_scan; 2231 } 2232 for (lelem = firstlelem; lelem <= lastlelem; lelem++) { 2233 if (*lelem && SvSMAGICAL(*lelem)) 2234 goto do_scan; 2235 } 2236 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) { 2237 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) { 2238 /* skip the scan if all scalars have a ref count of 1 */ 2239 for (lelem = firstlelem; lelem <= lastlelem; lelem++) { 2240 SV *sv = *lelem; 2241 if (!sv || SvREFCNT(sv) == 1) 2242 continue; 2243 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVAV) 2244 goto do_scan; 2245 break; 2246 } 2247 } 2248 else { 2249 do_scan: 2250 S_aassign_copy_common(aTHX_ 2251 firstlelem, lastlelem, firstrelem, lastrelem 2252 #ifdef DEBUGGING 2253 , fake 2254 #endif 2255 ); 2256 } 2257 } 2258 } 2259 #ifdef DEBUGGING 2260 else { 2261 /* on debugging builds, do the scan even if we've concluded we 2262 * don't need to, then panic if we find commonality. Note that the 2263 * scanner assumes at least 2 elements */ 2264 if (firstlelem < lastlelem && firstrelem < lastrelem) { 2265 fake = 1; 2266 goto do_scan; 2267 } 2268 } 2269 #endif 2270 2271 gimme = GIMME_V; 2272 relem = firstrelem; 2273 lelem = firstlelem; 2274 2275 if (relem > lastrelem) 2276 goto no_relems; 2277 2278 /* first lelem loop while there are still relems */ 2279 while (LIKELY(lelem <= lastlelem)) { 2280 bool alias = FALSE; 2281 SV *lsv = *lelem++; 2282 2283 TAINT_NOT; /* Each item stands on its own, taintwise. */ 2284 2285 assert(relem <= lastrelem); 2286 if (UNLIKELY(!lsv)) { 2287 alias = TRUE; 2288 lsv = *lelem++; 2289 ASSUME(SvTYPE(lsv) == SVt_PVAV); 2290 } 2291 2292 switch (SvTYPE(lsv)) { 2293 case SVt_PVAV: { 2294 SV **svp; 2295 SSize_t i; 2296 SSize_t tmps_base; 2297 SSize_t nelems = lastrelem - relem + 1; 2298 AV *ary = MUTABLE_AV(lsv); 2299 2300 /* Assigning to an aggregate is tricky. First there is the 2301 * issue of commonality, e.g. @a = ($a[0]). Since the 2302 * stack isn't refcounted, clearing @a prior to storing 2303 * elements will free $a[0]. 
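 * (The stack slot for $a[0] would then refer to an already-freed SV
 * by the time it gets copied into the cleared array.)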
Similarly with 2304 * sub FETCH { $status[$_[1]] } @status = @tied[0,1]; 2305 * 2306 * The way to avoid these issues is to make the copy of each 2307 * SV (and we normally store a *copy* in the array) *before* 2308 * clearing the array. But this has a problem in that 2309 * if the code croaks during copying, the not-yet-stored copies 2310 * could leak. One way to avoid this is to make all the copies 2311 * mortal, but that's quite expensive. 2312 * 2313 * The current solution to these issues is to use a chunk 2314 * of the tmps stack as a temporary refcounted-stack. SVs 2315 * will be put on there during processing to avoid leaks, 2316 * but will be removed again before the end of this block, 2317 * so free_tmps() is never normally called. Also, the 2318 * sv_refcnt of the SVs doesn't have to be manipulated, since 2319 * the ownership of 1 reference count is transferred directly 2320 * from the tmps stack to the AV when the SV is stored. 2321 * 2322 * We disarm slots in the temps stack by storing PL_sv_undef 2323 * there: it doesn't matter if that SV's refcount is 2324 * repeatedly decremented during a croak. But usually this is 2325 * only an interim measure. By the end of this code block 2326 * we try where possible to not leave any PL_sv_undef's on the 2327 * tmps stack e.g. by shuffling newer entries down. 2328 * 2329 * There is one case where we don't copy: non-magical 2330 * SvTEMP(sv)'s with a ref count of 1. The only owner of these 2331 * is on the tmps stack, so its safe to directly steal the SV 2332 * rather than copying. This is common in things like function 2333 * returns, map etc, which all return a list of such SVs. 2334 * 2335 * Note however something like @a = (f())[0,0], where there is 2336 * a danger of the same SV being shared: this avoided because 2337 * when the SV is stored as $a[0], its ref count gets bumped, 2338 * so the RC==1 test fails and the second element is copied 2339 * instead. 2340 * 2341 * We also use one slot in the tmps stack to hold an extra 2342 * ref to the array, to ensure it doesn't get prematurely 2343 * freed. Again, this is removed before the end of this block. 2344 * 2345 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible 2346 * @a = ($a[0]) case, but the current implementation uses the 2347 * same algorithm regardless, so ignores that flag. (It *is* 2348 * used in the hash branch below, however). 2349 */ 2350 2351 /* Reserve slots for ary, plus the elems we're about to copy, 2352 * then protect ary and temporarily void the remaining slots 2353 * with &PL_sv_undef */ 2354 EXTEND_MORTAL(nelems + 1); 2355 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary); 2356 tmps_base = PL_tmps_ix + 1; 2357 for (i = 0; i < nelems; i++) 2358 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2359 PL_tmps_ix += nelems; 2360 2361 /* Make a copy of each RHS elem and save on the tmps_stack 2362 * (or pass through where we can optimise away the copy) */ 2363 2364 if (UNLIKELY(alias)) { 2365 U32 lval = (gimme == G_ARRAY) 2366 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0; 2367 for (svp = relem; svp <= lastrelem; svp++) { 2368 SV *rsv = *svp; 2369 2370 SvGETMAGIC(rsv); 2371 if (!SvROK(rsv)) 2372 DIE(aTHX_ "Assigned value is not a reference"); 2373 if (SvTYPE(SvRV(rsv)) > SVt_PVLV) 2374 /* diag_listed_as: Assigned value is not %s reference */ 2375 DIE(aTHX_ 2376 "Assigned value is not a SCALAR reference"); 2377 if (lval) 2378 *svp = rsv = sv_mortalcopy(rsv); 2379 /* XXX else check for weak refs? 
*/ 2380 rsv = SvREFCNT_inc_NN(SvRV(rsv)); 2381 assert(tmps_base <= PL_tmps_max); 2382 PL_tmps_stack[tmps_base++] = rsv; 2383 } 2384 } 2385 else { 2386 for (svp = relem; svp <= lastrelem; svp++) { 2387 SV *rsv = *svp; 2388 2389 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) { 2390 /* can skip the copy */ 2391 SvREFCNT_inc_simple_void_NN(rsv); 2392 SvTEMP_off(rsv); 2393 } 2394 else { 2395 SV *nsv; 2396 /* do get before newSV, in case it dies and leaks */ 2397 SvGETMAGIC(rsv); 2398 nsv = newSV(0); 2399 /* see comment in S_aassign_copy_common about 2400 * SV_NOSTEAL */ 2401 sv_setsv_flags(nsv, rsv, 2402 (SV_DO_COW_SVSETSV|SV_NOSTEAL)); 2403 rsv = *svp = nsv; 2404 } 2405 2406 assert(tmps_base <= PL_tmps_max); 2407 PL_tmps_stack[tmps_base++] = rsv; 2408 } 2409 } 2410 2411 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */ 2412 av_clear(ary); 2413 2414 /* store in the array, the SVs that are in the tmps stack */ 2415 2416 tmps_base -= nelems; 2417 2418 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) { 2419 /* for arrays we can't cheat with, use the official API */ 2420 av_extend(ary, nelems - 1); 2421 for (i = 0; i < nelems; i++) { 2422 SV **svp = &(PL_tmps_stack[tmps_base + i]); 2423 SV *rsv = *svp; 2424 /* A tied store won't take ownership of rsv, so keep 2425 * the 1 refcnt on the tmps stack; otherwise disarm 2426 * the tmps stack entry */ 2427 if (av_store(ary, i, rsv)) 2428 *svp = &PL_sv_undef; 2429 /* av_store() may have added set magic to rsv */; 2430 SvSETMAGIC(rsv); 2431 } 2432 /* disarm ary refcount: see comments below about leak */ 2433 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef; 2434 } 2435 else { 2436 /* directly access/set the guts of the AV */ 2437 SSize_t fill = nelems - 1; 2438 if (fill > AvMAX(ary)) 2439 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary), 2440 &AvARRAY(ary)); 2441 AvFILLp(ary) = fill; 2442 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*); 2443 /* Quietly remove all the SVs from the tmps stack slots, 2444 * since ary has now taken ownership of the refcnt. 2445 * Also remove ary: which will now leak if we die before 2446 * the SvREFCNT_dec_NN(ary) below */ 2447 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems)) 2448 Move(&PL_tmps_stack[tmps_base + nelems], 2449 &PL_tmps_stack[tmps_base - 1], 2450 PL_tmps_ix - (tmps_base + nelems) + 1, 2451 SV*); 2452 PL_tmps_ix -= (nelems + 1); 2453 } 2454 2455 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA)) 2456 /* its assumed @ISA set magic can't die and leak ary */ 2457 SvSETMAGIC(MUTABLE_SV(ary)); 2458 SvREFCNT_dec_NN(ary); 2459 2460 relem = lastrelem + 1; 2461 goto no_relems; 2462 } 2463 2464 case SVt_PVHV: { /* normal hash */ 2465 2466 SV **svp; 2467 bool dirty_tmps; 2468 SSize_t i; 2469 SSize_t tmps_base; 2470 SSize_t nelems = lastrelem - relem + 1; 2471 HV *hash = MUTABLE_HV(lsv); 2472 2473 if (UNLIKELY(nelems & 1)) { 2474 do_oddball(lastrelem, relem); 2475 /* we have firstlelem to reuse, it's not needed any more */ 2476 *++lastrelem = &PL_sv_undef; 2477 nelems++; 2478 } 2479 2480 /* See the SVt_PVAV branch above for a long description of 2481 * how the following all works. The main difference for hashes 2482 * is that we treat keys and values separately (and have 2483 * separate loops for them): as for arrays, values are always 2484 * copied (except for the SvTEMP optimisation), since they 2485 * need to be stored in the hash; while keys are only 2486 * processed where they might get prematurely freed or 2487 * whatever. 
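 *
 * (A rough illustration with made-up names: in
 *     %h = ($k, make_value($k));
 * the value returned by make_value() is always copied before being
 * stored, while $k itself only needs protecting when clearing %h
 * could free it, or when the key/value list must also be returned
 * in list context.)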
*/ 2488 2489 /* tmps stack slots: 2490 * * reserve a slot for the hash keepalive; 2491 * * reserve slots for the hash values we're about to copy; 2492 * * preallocate for the keys we'll possibly copy or refcount bump 2493 * later; 2494 * then protect hash and temporarily void the remaining 2495 * value slots with &PL_sv_undef */ 2496 EXTEND_MORTAL(nelems + 1); 2497 2498 /* convert to number of key/value pairs */ 2499 nelems >>= 1; 2500 2501 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash); 2502 tmps_base = PL_tmps_ix + 1; 2503 for (i = 0; i < nelems; i++) 2504 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2505 PL_tmps_ix += nelems; 2506 2507 /* Make a copy of each RHS hash value and save on the tmps_stack 2508 * (or pass through where we can optimise away the copy) */ 2509 2510 for (svp = relem + 1; svp <= lastrelem; svp += 2) { 2511 SV *rsv = *svp; 2512 2513 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) { 2514 /* can skip the copy */ 2515 SvREFCNT_inc_simple_void_NN(rsv); 2516 SvTEMP_off(rsv); 2517 } 2518 else { 2519 SV *nsv; 2520 /* do get before newSV, in case it dies and leaks */ 2521 SvGETMAGIC(rsv); 2522 nsv = newSV(0); 2523 /* see comment in S_aassign_copy_common about 2524 * SV_NOSTEAL */ 2525 sv_setsv_flags(nsv, rsv, 2526 (SV_DO_COW_SVSETSV|SV_NOSTEAL)); 2527 rsv = *svp = nsv; 2528 } 2529 2530 assert(tmps_base <= PL_tmps_max); 2531 PL_tmps_stack[tmps_base++] = rsv; 2532 } 2533 tmps_base -= nelems; 2534 2535 2536 /* possibly protect keys */ 2537 2538 if (UNLIKELY(gimme == G_ARRAY)) { 2539 /* handle e.g. 2540 * @a = ((%h = ($$r, 1)), $r = "x"); 2541 * $_++ for %h = (1,2,3,4); 2542 */ 2543 EXTEND_MORTAL(nelems); 2544 for (svp = relem; svp <= lastrelem; svp += 2) 2545 *svp = sv_mortalcopy_flags(*svp, 2546 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL); 2547 } 2548 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) { 2549 /* for possible commonality, e.g. 2550 * %h = ($h{a},1) 2551 * avoid premature freeing RHS keys by mortalising 2552 * them. 2553 * For a magic element, make a copy so that its magic is 2554 * called *before* the hash is emptied (which may affect 2555 * a tied value for example). 2556 * In theory we should check for magic keys in all 2557 * cases, not just under OPpASSIGN_COMMON_AGG, but in 2558 * practice, !OPpASSIGN_COMMON_AGG implies only 2559 * constants or padtmps on the RHS. 
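 *
 * (A loose sketch of the magic-key case being guarded against,
 * where $magic_key stands for any gmagical SV:
 *     %h = ($magic_key, 1);
 * its FETCH may itself read from %h, so the key is copied before
 * the hash is cleared.)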
2560 */ 2561 EXTEND_MORTAL(nelems); 2562 for (svp = relem; svp <= lastrelem; svp += 2) { 2563 SV *rsv = *svp; 2564 if (UNLIKELY(SvGMAGICAL(rsv))) { 2565 SSize_t n; 2566 *svp = sv_mortalcopy_flags(*svp, 2567 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL); 2568 /* allow other branch to continue pushing 2569 * onto tmps stack without checking each time */ 2570 n = (lastrelem - relem) >> 1; 2571 EXTEND_MORTAL(n); 2572 } 2573 else 2574 PL_tmps_stack[++PL_tmps_ix] = 2575 SvREFCNT_inc_simple_NN(rsv); 2576 } 2577 } 2578 2579 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash)) 2580 hv_clear(hash); 2581 2582 /* now assign the keys and values to the hash */ 2583 2584 dirty_tmps = FALSE; 2585 2586 if (UNLIKELY(gimme == G_ARRAY)) { 2587 /* @a = (%h = (...)) etc */ 2588 SV **svp; 2589 SV **topelem = relem; 2590 2591 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) { 2592 SV *key = *svp++; 2593 SV *val = *svp; 2594 /* remove duplicates from list we return */ 2595 if (!hv_exists_ent(hash, key, 0)) { 2596 /* copy key back: possibly to an earlier 2597 * stack location if we encountered dups earlier, 2598 * The values will be updated later 2599 */ 2600 *topelem = key; 2601 topelem += 2; 2602 } 2603 /* A tied store won't take ownership of val, so keep 2604 * the 1 refcnt on the tmps stack; otherwise disarm 2605 * the tmps stack entry */ 2606 if (hv_store_ent(hash, key, val, 0)) 2607 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2608 else 2609 dirty_tmps = TRUE; 2610 /* hv_store_ent() may have added set magic to val */; 2611 SvSETMAGIC(val); 2612 } 2613 if (topelem < svp) { 2614 /* at this point we have removed the duplicate key/value 2615 * pairs from the stack, but the remaining values may be 2616 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed 2617 * the (a 2), but the stack now probably contains 2618 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) } 2619 * obliterates the earlier key. So refresh all values. */ 2620 lastrelem = topelem - 1; 2621 while (relem < lastrelem) { 2622 HE *he; 2623 he = hv_fetch_ent(hash, *relem++, 0, 0); 2624 *relem++ = (he ? HeVAL(he) : &PL_sv_undef); 2625 } 2626 } 2627 } 2628 else { 2629 SV **svp; 2630 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) { 2631 SV *key = *svp++; 2632 SV *val = *svp; 2633 if (hv_store_ent(hash, key, val, 0)) 2634 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2635 else 2636 dirty_tmps = TRUE; 2637 /* hv_store_ent() may have added set magic to val */; 2638 SvSETMAGIC(val); 2639 } 2640 } 2641 2642 if (dirty_tmps) { 2643 /* there are still some 'live' recounts on the tmps stack 2644 * - usually caused by storing into a tied hash. So let 2645 * free_tmps() do the proper but slow job later. 2646 * Just disarm hash refcount: see comments below about leak 2647 */ 2648 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef; 2649 } 2650 else { 2651 /* Quietly remove all the SVs from the tmps stack slots, 2652 * since hash has now taken ownership of the refcnt. 
2653 * Also remove hash: which will now leak if we die before 2654 * the SvREFCNT_dec_NN(hash) below */ 2655 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems)) 2656 Move(&PL_tmps_stack[tmps_base + nelems], 2657 &PL_tmps_stack[tmps_base - 1], 2658 PL_tmps_ix - (tmps_base + nelems) + 1, 2659 SV*); 2660 PL_tmps_ix -= (nelems + 1); 2661 } 2662 2663 SvREFCNT_dec_NN(hash); 2664 2665 relem = lastrelem + 1; 2666 goto no_relems; 2667 } 2668 2669 default: 2670 if (!SvIMMORTAL(lsv)) { 2671 SV *ref; 2672 2673 if (UNLIKELY( 2674 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 && 2675 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC) 2676 )) 2677 Perl_warner(aTHX_ 2678 packWARN(WARN_MISC), 2679 "Useless assignment to a temporary" 2680 ); 2681 2682 /* avoid freeing $$lsv if it might be needed for further 2683 * elements, e.g. ($ref, $foo) = (1, $$ref) */ 2684 if ( SvROK(lsv) 2685 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1) 2686 && lelem <= lastlelem 2687 ) { 2688 SSize_t ix; 2689 SvREFCNT_inc_simple_void_NN(ref); 2690 /* an unrolled sv_2mortal */ 2691 ix = ++PL_tmps_ix; 2692 if (UNLIKELY(ix >= PL_tmps_max)) 2693 /* speculatively grow enough to cover other 2694 * possible refs */ 2695 (void)tmps_grow_p(ix + (lastlelem - lelem)); 2696 PL_tmps_stack[ix] = ref; 2697 } 2698 2699 sv_setsv(lsv, *relem); 2700 *relem = lsv; 2701 SvSETMAGIC(lsv); 2702 } 2703 if (++relem > lastrelem) 2704 goto no_relems; 2705 break; 2706 } /* switch */ 2707 } /* while */ 2708 2709 2710 no_relems: 2711 2712 /* simplified lelem loop for when there are no relems left */ 2713 while (LIKELY(lelem <= lastlelem)) { 2714 SV *lsv = *lelem++; 2715 2716 TAINT_NOT; /* Each item stands on its own, taintwise. */ 2717 2718 if (UNLIKELY(!lsv)) { 2719 lsv = *lelem++; 2720 ASSUME(SvTYPE(lsv) == SVt_PVAV); 2721 } 2722 2723 switch (SvTYPE(lsv)) { 2724 case SVt_PVAV: 2725 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) { 2726 av_clear((AV*)lsv); 2727 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA)) 2728 SvSETMAGIC(lsv); 2729 } 2730 break; 2731 2732 case SVt_PVHV: 2733 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv)) 2734 hv_clear((HV*)lsv); 2735 break; 2736 2737 default: 2738 if (!SvIMMORTAL(lsv)) { 2739 sv_set_undef(lsv); 2740 SvSETMAGIC(lsv); 2741 *relem++ = lsv; 2742 } 2743 break; 2744 } /* switch */ 2745 } /* while */ 2746 2747 TAINT_NOT; /* result of list assign isn't tainted */ 2748 2749 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) { 2750 /* Will be used to set PL_tainting below */ 2751 Uid_t tmp_uid = PerlProc_getuid(); 2752 Uid_t tmp_euid = PerlProc_geteuid(); 2753 Gid_t tmp_gid = PerlProc_getgid(); 2754 Gid_t tmp_egid = PerlProc_getegid(); 2755 2756 /* XXX $> et al currently silently ignore failures */ 2757 if (PL_delaymagic & DM_UID) { 2758 #ifdef HAS_SETRESUID 2759 PERL_UNUSED_RESULT( 2760 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1, 2761 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1, 2762 (Uid_t)-1)); 2763 #elif defined(HAS_SETREUID) 2764 PERL_UNUSED_RESULT( 2765 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1, 2766 (PL_delaymagic & DM_EUID) ? 
PL_delaymagic_euid : (Uid_t)-1)); 2767 #else 2768 # ifdef HAS_SETRUID 2769 if ((PL_delaymagic & DM_UID) == DM_RUID) { 2770 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid)); 2771 PL_delaymagic &= ~DM_RUID; 2772 } 2773 # endif /* HAS_SETRUID */ 2774 # ifdef HAS_SETEUID 2775 if ((PL_delaymagic & DM_UID) == DM_EUID) { 2776 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid)); 2777 PL_delaymagic &= ~DM_EUID; 2778 } 2779 # endif /* HAS_SETEUID */ 2780 if (PL_delaymagic & DM_UID) { 2781 if (PL_delaymagic_uid != PL_delaymagic_euid) 2782 DIE(aTHX_ "No setreuid available"); 2783 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid)); 2784 } 2785 #endif /* HAS_SETRESUID */ 2786 2787 tmp_uid = PerlProc_getuid(); 2788 tmp_euid = PerlProc_geteuid(); 2789 } 2790 /* XXX $> et al currently silently ignore failures */ 2791 if (PL_delaymagic & DM_GID) { 2792 #ifdef HAS_SETRESGID 2793 PERL_UNUSED_RESULT( 2794 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1, 2795 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1, 2796 (Gid_t)-1)); 2797 #elif defined(HAS_SETREGID) 2798 PERL_UNUSED_RESULT( 2799 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1, 2800 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1)); 2801 #else 2802 # ifdef HAS_SETRGID 2803 if ((PL_delaymagic & DM_GID) == DM_RGID) { 2804 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid)); 2805 PL_delaymagic &= ~DM_RGID; 2806 } 2807 # endif /* HAS_SETRGID */ 2808 # ifdef HAS_SETEGID 2809 if ((PL_delaymagic & DM_GID) == DM_EGID) { 2810 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid)); 2811 PL_delaymagic &= ~DM_EGID; 2812 } 2813 # endif /* HAS_SETEGID */ 2814 if (PL_delaymagic & DM_GID) { 2815 if (PL_delaymagic_gid != PL_delaymagic_egid) 2816 DIE(aTHX_ "No setregid available"); 2817 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid)); 2818 } 2819 #endif /* HAS_SETRESGID */ 2820 2821 tmp_gid = PerlProc_getgid(); 2822 tmp_egid = PerlProc_getegid(); 2823 } 2824 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) ); 2825 #ifdef NO_TAINT_SUPPORT 2826 PERL_UNUSED_VAR(tmp_uid); 2827 PERL_UNUSED_VAR(tmp_euid); 2828 PERL_UNUSED_VAR(tmp_gid); 2829 PERL_UNUSED_VAR(tmp_egid); 2830 #endif 2831 } 2832 PL_delaymagic = old_delaymagic; 2833 2834 if (gimme == G_VOID) 2835 SP = firstrelem - 1; 2836 else if (gimme == G_SCALAR) { 2837 SP = firstrelem; 2838 EXTEND(SP,1); 2839 if (PL_op->op_private & OPpASSIGN_TRUEBOOL) 2840 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero); 2841 else { 2842 dTARGET; 2843 SETi(firstlelem - firstrelem); 2844 } 2845 } 2846 else 2847 SP = relem - 1; 2848 2849 RETURN; 2850 } 2851 2852 PP(pp_qr) 2853 { 2854 dSP; 2855 PMOP * const pm = cPMOP; 2856 REGEXP * rx = PM_GETRE(pm); 2857 regexp *prog = ReANY(rx); 2858 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx)); 2859 SV * const rv = sv_newmortal(); 2860 CV **cvp; 2861 CV *cv; 2862 2863 SvUPGRADE(rv, SVt_IV); 2864 /* For a subroutine describing itself as "This is a hacky workaround" I'm 2865 loathe to use it here, but it seems to be the right fix. Or close. 2866 The key part appears to be that it's essential for pp_qr to return a new 2867 object (SV), which implies that there needs to be an effective way to 2868 generate a new SV from the existing SV that is pre-compiled in the 2869 optree. 
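
       (A rough illustration of why a fresh object is needed on each
       execution:
           my @qrs = map { qr/$_/ } 'a', 'b';
       every evaluation of qr// must yield a distinct blessed SV rather
       than sharing the one compiled into the optree.)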
*/ 2870 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx))); 2871 SvROK_on(rv); 2872 2873 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv); 2874 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) { 2875 *cvp = cv_clone(cv); 2876 SvREFCNT_dec_NN(cv); 2877 } 2878 2879 if (pkg) { 2880 HV *const stash = gv_stashsv(pkg, GV_ADD); 2881 SvREFCNT_dec_NN(pkg); 2882 (void)sv_bless(rv, stash); 2883 } 2884 2885 if (UNLIKELY(RXp_ISTAINTED(prog))) { 2886 SvTAINTED_on(rv); 2887 SvTAINTED_on(SvRV(rv)); 2888 } 2889 XPUSHs(rv); 2890 RETURN; 2891 } 2892 2893 PP(pp_match) 2894 { 2895 dSP; dTARG; 2896 PMOP *pm = cPMOP; 2897 PMOP *dynpm = pm; 2898 const char *s; 2899 const char *strend; 2900 SSize_t curpos = 0; /* initial pos() or current $+[0] */ 2901 I32 global; 2902 U8 r_flags = 0; 2903 const char *truebase; /* Start of string */ 2904 REGEXP *rx = PM_GETRE(pm); 2905 regexp *prog = ReANY(rx); 2906 bool rxtainted; 2907 const U8 gimme = GIMME_V; 2908 STRLEN len; 2909 const I32 oldsave = PL_savestack_ix; 2910 I32 had_zerolen = 0; 2911 MAGIC *mg = NULL; 2912 2913 if (PL_op->op_flags & OPf_STACKED) 2914 TARG = POPs; 2915 else { 2916 if (ARGTARG) 2917 GETTARGET; 2918 else { 2919 TARG = DEFSV; 2920 } 2921 EXTEND(SP,1); 2922 } 2923 2924 PUTBACK; /* EVAL blocks need stack_sp. */ 2925 /* Skip get-magic if this is a qr// clone, because regcomp has 2926 already done it. */ 2927 truebase = prog->mother_re 2928 ? SvPV_nomg_const(TARG, len) 2929 : SvPV_const(TARG, len); 2930 if (!truebase) 2931 DIE(aTHX_ "panic: pp_match"); 2932 strend = truebase + len; 2933 rxtainted = (RXp_ISTAINTED(prog) || 2934 (TAINT_get && (pm->op_pmflags & PMf_RETAINT))); 2935 TAINT_NOT; 2936 2937 /* We need to know this in case we fail out early - pos() must be reset */ 2938 global = dynpm->op_pmflags & PMf_GLOBAL; 2939 2940 /* PMdf_USED is set after a ?? matches once */ 2941 if ( 2942 #ifdef USE_ITHREADS 2943 SvREADONLY(PL_regex_pad[pm->op_pmoffset]) 2944 #else 2945 pm->op_pmflags & PMf_USED 2946 #endif 2947 ) { 2948 DEBUG_r(PerlIO_printf(Perl_debug_log, "?? already matched once")); 2949 goto nope; 2950 } 2951 2952 /* handle the empty pattern */ 2953 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) { 2954 if (PL_curpm == PL_reg_curpm) { 2955 if (PL_curpm_under) { 2956 if (PL_curpm_under == PL_reg_curpm) { 2957 Perl_croak(aTHX_ "Infinite recursion via empty pattern"); 2958 } else { 2959 pm = PL_curpm_under; 2960 } 2961 } 2962 } else { 2963 pm = PL_curpm; 2964 } 2965 rx = PM_GETRE(pm); 2966 prog = ReANY(rx); 2967 } 2968 2969 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) { 2970 DEBUG_r(PerlIO_printf(Perl_debug_log, "String shorter than min possible regex match (%" 2971 UVuf " < %" IVdf ")\n", 2972 (UV)len, (IV)RXp_MINLEN(prog))); 2973 goto nope; 2974 } 2975 2976 /* get pos() if //g */ 2977 if (global) { 2978 mg = mg_find_mglob(TARG); 2979 if (mg && mg->mg_len >= 0) { 2980 curpos = MgBYTEPOS(mg, TARG, truebase, len); 2981 /* last time pos() was set, it was zero-length match */ 2982 if (mg->mg_flags & MGf_MINMATCH) 2983 had_zerolen = 1; 2984 } 2985 } 2986 2987 #ifdef PERL_SAWAMPERSAND 2988 if ( RXp_NPARENS(prog) 2989 || PL_sawampersand 2990 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY)) 2991 || (dynpm->op_pmflags & PMf_KEEPCOPY) 2992 ) 2993 #endif 2994 { 2995 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE); 2996 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer 2997 * only on the first iteration. 
Therefore we need to copy $' as well 2998 * as $&, to make the rest of the string available for captures in 2999 * subsequent iterations */ 3000 if (! (global && gimme == G_ARRAY)) 3001 r_flags |= REXEC_COPY_SKIP_POST; 3002 }; 3003 #ifdef PERL_SAWAMPERSAND 3004 if (dynpm->op_pmflags & PMf_KEEPCOPY) 3005 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */ 3006 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST); 3007 #endif 3008 3009 s = truebase; 3010 3011 play_it_again: 3012 if (global) 3013 s = truebase + curpos; 3014 3015 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase, 3016 had_zerolen, TARG, NULL, r_flags)) 3017 goto nope; 3018 3019 PL_curpm = pm; 3020 if (dynpm->op_pmflags & PMf_ONCE) 3021 #ifdef USE_ITHREADS 3022 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]); 3023 #else 3024 dynpm->op_pmflags |= PMf_USED; 3025 #endif 3026 3027 if (rxtainted) 3028 RXp_MATCH_TAINTED_on(prog); 3029 TAINT_IF(RXp_MATCH_TAINTED(prog)); 3030 3031 /* update pos */ 3032 3033 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) { 3034 if (!mg) 3035 mg = sv_magicext_mglob(TARG); 3036 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end); 3037 if (RXp_ZERO_LEN(prog)) 3038 mg->mg_flags |= MGf_MINMATCH; 3039 else 3040 mg->mg_flags &= ~MGf_MINMATCH; 3041 } 3042 3043 if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) { 3044 LEAVE_SCOPE(oldsave); 3045 RETPUSHYES; 3046 } 3047 3048 /* push captures on stack */ 3049 3050 { 3051 const I32 nparens = RXp_NPARENS(prog); 3052 I32 i = (global && !nparens) ? 1 : 0; 3053 3054 SPAGAIN; /* EVAL blocks could move the stack. */ 3055 EXTEND(SP, nparens + i); 3056 EXTEND_MORTAL(nparens + i); 3057 for (i = !i; i <= nparens; i++) { 3058 PUSHs(sv_newmortal()); 3059 if (LIKELY((RXp_OFFS(prog)[i].start != -1) 3060 && RXp_OFFS(prog)[i].end != -1 )) 3061 { 3062 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start; 3063 const char * const s = RXp_OFFS(prog)[i].start + truebase; 3064 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0 3065 || RXp_OFFS(prog)[i].start < 0 3066 || len < 0 3067 || len > strend - s) 3068 ) 3069 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, " 3070 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf, 3071 (long) i, (long) RXp_OFFS(prog)[i].start, 3072 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len); 3073 sv_setpvn(*SP, s, len); 3074 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len)) 3075 SvUTF8_on(*SP); 3076 } 3077 } 3078 if (global) { 3079 curpos = (UV)RXp_OFFS(prog)[0].end; 3080 had_zerolen = RXp_ZERO_LEN(prog); 3081 PUTBACK; /* EVAL blocks may use stack */ 3082 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST; 3083 goto play_it_again; 3084 } 3085 LEAVE_SCOPE(oldsave); 3086 RETURN; 3087 } 3088 NOT_REACHED; /* NOTREACHED */ 3089 3090 nope: 3091 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) { 3092 if (!mg) 3093 mg = mg_find_mglob(TARG); 3094 if (mg) 3095 mg->mg_len = -1; 3096 } 3097 LEAVE_SCOPE(oldsave); 3098 if (gimme == G_ARRAY) 3099 RETURN; 3100 RETPUSHNO; 3101 } 3102 3103 OP * 3104 Perl_do_readline(pTHX) 3105 { 3106 dSP; dTARGETSTACKED; 3107 SV *sv; 3108 STRLEN tmplen = 0; 3109 STRLEN offset; 3110 PerlIO *fp; 3111 IO * const io = GvIO(PL_last_in_gv); 3112 const I32 type = PL_op->op_type; 3113 const U8 gimme = GIMME_V; 3114 3115 if (io) { 3116 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar); 3117 if (mg) { 3118 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0); 3119 if (gimme == G_SCALAR) { 3120 SPAGAIN; 3121 SvSetSV_nosteal(TARG, TOPs); 3122 
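/* apply set magic to TARG and make it the value left on the stack */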
SETTARG; 3123 } 3124 return NORMAL; 3125 } 3126 } 3127 fp = NULL; 3128 if (io) { 3129 fp = IoIFP(io); 3130 if (!fp) { 3131 if (IoFLAGS(io) & IOf_ARGV) { 3132 if (IoFLAGS(io) & IOf_START) { 3133 IoLINES(io) = 0; 3134 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) { 3135 IoFLAGS(io) &= ~IOf_START; 3136 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0); 3137 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */ 3138 sv_setpvs(GvSVn(PL_last_in_gv), "-"); 3139 SvSETMAGIC(GvSV(PL_last_in_gv)); 3140 fp = IoIFP(io); 3141 goto have_fp; 3142 } 3143 } 3144 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL); 3145 if (!fp) { /* Note: fp != IoIFP(io) */ 3146 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/ 3147 } 3148 } 3149 else if (type == OP_GLOB) 3150 fp = Perl_start_glob(aTHX_ POPs, io); 3151 } 3152 else if (type == OP_GLOB) 3153 SP--; 3154 else if (IoTYPE(io) == IoTYPE_WRONLY) { 3155 report_wrongway_fh(PL_last_in_gv, '>'); 3156 } 3157 } 3158 if (!fp) { 3159 if ((!io || !(IoFLAGS(io) & IOf_START)) 3160 && ckWARN(WARN_CLOSED) 3161 && type != OP_GLOB) 3162 { 3163 report_evil_fh(PL_last_in_gv); 3164 } 3165 if (gimme == G_SCALAR) { 3166 /* undef TARG, and push that undefined value */ 3167 if (type != OP_RCATLINE) { 3168 sv_set_undef(TARG); 3169 } 3170 PUSHTARG; 3171 } 3172 RETURN; 3173 } 3174 have_fp: 3175 if (gimme == G_SCALAR) { 3176 sv = TARG; 3177 if (type == OP_RCATLINE && SvGMAGICAL(sv)) 3178 mg_get(sv); 3179 if (SvROK(sv)) { 3180 if (type == OP_RCATLINE) 3181 SvPV_force_nomg_nolen(sv); 3182 else 3183 sv_unref(sv); 3184 } 3185 else if (isGV_with_GP(sv)) { 3186 SvPV_force_nomg_nolen(sv); 3187 } 3188 SvUPGRADE(sv, SVt_PV); 3189 tmplen = SvLEN(sv); /* remember if already alloced */ 3190 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) { 3191 /* try short-buffering it. Please update t/op/readline.t 3192 * if you change the growth length. 3193 */ 3194 Sv_Grow(sv, 80); 3195 } 3196 offset = 0; 3197 if (type == OP_RCATLINE && SvOK(sv)) { 3198 if (!SvPOK(sv)) { 3199 SvPV_force_nomg_nolen(sv); 3200 } 3201 offset = SvCUR(sv); 3202 } 3203 } 3204 else { 3205 sv = sv_2mortal(newSV(80)); 3206 offset = 0; 3207 } 3208 3209 /* This should not be marked tainted if the fp is marked clean */ 3210 #define MAYBE_TAINT_LINE(io, sv) \ 3211 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \ 3212 TAINT; \ 3213 SvTAINTED_on(sv); \ 3214 } 3215 3216 /* delay EOF state for a snarfed empty file */ 3217 #define SNARF_EOF(gimme,rs,io,sv) \ 3218 (gimme != G_SCALAR || SvCUR(sv) \ 3219 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs)) 3220 3221 for (;;) { 3222 PUTBACK; 3223 if (!sv_gets(sv, fp, offset) 3224 && (type == OP_GLOB 3225 || SNARF_EOF(gimme, PL_rs, io, sv) 3226 || PerlIO_error(fp))) 3227 { 3228 PerlIO_clearerr(fp); 3229 if (IoFLAGS(io) & IOf_ARGV) { 3230 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL); 3231 if (fp) 3232 continue; 3233 (void)do_close(PL_last_in_gv, FALSE); 3234 } 3235 else if (type == OP_GLOB) { 3236 if (!do_close(PL_last_in_gv, FALSE)) { 3237 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB), 3238 "glob failed (child exited with status %d%s)", 3239 (int)(STATUS_CURRENT >> 8), 3240 (STATUS_CURRENT & 0x80) ? 
", core dumped" : ""); 3241 } 3242 } 3243 if (gimme == G_SCALAR) { 3244 if (type != OP_RCATLINE) { 3245 SV_CHECK_THINKFIRST_COW_DROP(TARG); 3246 SvOK_off(TARG); 3247 } 3248 SPAGAIN; 3249 PUSHTARG; 3250 } 3251 MAYBE_TAINT_LINE(io, sv); 3252 RETURN; 3253 } 3254 MAYBE_TAINT_LINE(io, sv); 3255 IoLINES(io)++; 3256 IoFLAGS(io) |= IOf_NOLINE; 3257 SvSETMAGIC(sv); 3258 SPAGAIN; 3259 XPUSHs(sv); 3260 if (type == OP_GLOB) { 3261 const char *t1; 3262 Stat_t statbuf; 3263 3264 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) { 3265 char * const tmps = SvEND(sv) - 1; 3266 if (*tmps == *SvPVX_const(PL_rs)) { 3267 *tmps = '\0'; 3268 SvCUR_set(sv, SvCUR(sv) - 1); 3269 } 3270 } 3271 for (t1 = SvPVX_const(sv); *t1; t1++) 3272 #ifdef __VMS 3273 if (strchr("*%?", *t1)) 3274 #else 3275 if (strchr("$&*(){}[]'\";\\|?<>~`", *t1)) 3276 #endif 3277 break; 3278 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) { 3279 (void)POPs; /* Unmatched wildcard? Chuck it... */ 3280 continue; 3281 } 3282 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */ 3283 if (ckWARN(WARN_UTF8)) { 3284 const U8 * const s = (const U8*)SvPVX_const(sv) + offset; 3285 const STRLEN len = SvCUR(sv) - offset; 3286 const U8 *f; 3287 3288 if (!is_utf8_string_loc(s, len, &f)) 3289 /* Emulate :encoding(utf8) warning in the same case. */ 3290 Perl_warner(aTHX_ packWARN(WARN_UTF8), 3291 "utf8 \"\\x%02X\" does not map to Unicode", 3292 f < (U8*)SvEND(sv) ? *f : 0); 3293 } 3294 } 3295 if (gimme == G_ARRAY) { 3296 if (SvLEN(sv) - SvCUR(sv) > 20) { 3297 SvPV_shrink_to_cur(sv); 3298 } 3299 sv = sv_2mortal(newSV(80)); 3300 continue; 3301 } 3302 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) { 3303 /* try to reclaim a bit of scalar space (only on 1st alloc) */ 3304 const STRLEN new_len 3305 = SvCUR(sv) < 60 ? 80 : SvCUR(sv)+40; /* allow some slop */ 3306 SvPV_renew(sv, new_len); 3307 } 3308 RETURN; 3309 } 3310 } 3311 3312 PP(pp_helem) 3313 { 3314 dSP; 3315 HE* he; 3316 SV **svp; 3317 SV * const keysv = POPs; 3318 HV * const hv = MUTABLE_HV(POPs); 3319 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 3320 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 3321 SV *sv; 3322 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 3323 bool preeminent = TRUE; 3324 3325 if (SvTYPE(hv) != SVt_PVHV) 3326 RETPUSHUNDEF; 3327 3328 if (localizing) { 3329 MAGIC *mg; 3330 HV *stash; 3331 3332 /* If we can determine whether the element exist, 3333 * Try to preserve the existenceness of a tied hash 3334 * element by using EXISTS and DELETE if possible. 3335 * Fallback to FETCH and STORE otherwise. */ 3336 if (SvCANEXISTDELETE(hv)) 3337 preeminent = hv_exists_ent(hv, keysv, 0); 3338 } 3339 3340 he = hv_fetch_ent(hv, keysv, lval && !defer, 0); 3341 svp = he ? &HeVAL(he) : NULL; 3342 if (lval) { 3343 if (!svp || !*svp || *svp == &PL_sv_undef) { 3344 SV* lv; 3345 SV* key2; 3346 if (!defer) { 3347 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv)); 3348 } 3349 lv = sv_newmortal(); 3350 sv_upgrade(lv, SVt_PVLV); 3351 LvTYPE(lv) = 'y'; 3352 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0); 3353 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */ 3354 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv); 3355 LvTARGLEN(lv) = 1; 3356 PUSHs(lv); 3357 RETURN; 3358 } 3359 if (localizing) { 3360 if (HvNAME_get(hv) && isGV_or_RVCV(*svp)) 3361 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL)); 3362 else if (preeminent) 3363 save_helem_flags(hv, keysv, svp, 3364 (PL_op->op_flags & OPf_SPECIAL) ? 
0 : SAVEf_SETMAGIC); 3365 else 3366 SAVEHDELETE(hv, keysv); 3367 } 3368 else if (PL_op->op_private & OPpDEREF) { 3369 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF)); 3370 RETURN; 3371 } 3372 } 3373 sv = (svp && *svp ? *svp : &PL_sv_undef); 3374 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this 3375 * was to make C<local $tied{foo} = $tied{foo}> possible. 3376 * However, it seems no longer to be needed for that purpose, and 3377 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g> 3378 * would loop endlessly since the pos magic is getting set on the 3379 * mortal copy and lost. However, the copy has the effect of 3380 * triggering the get magic, and losing it altogether made things like 3381 * c<$tied{foo};> in void context no longer do get magic, which some 3382 * code relied on. Also, delayed triggering of magic on @+ and friends 3383 * meant the original regex may be out of scope by now. So as a 3384 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it 3385 * being called too many times). */ 3386 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv)) 3387 mg_get(sv); 3388 PUSHs(sv); 3389 RETURN; 3390 } 3391 3392 3393 /* a stripped-down version of Perl_softref2xv() for use by 3394 * pp_multideref(), which doesn't use PL_op->op_flags */ 3395 3396 STATIC GV * 3397 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what, 3398 const svtype type) 3399 { 3400 if (PL_op->op_private & HINT_STRICT_REFS) { 3401 if (SvOK(sv)) 3402 Perl_die(aTHX_ PL_no_symref_sv, sv, 3403 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." : ""), what); 3404 else 3405 Perl_die(aTHX_ PL_no_usym, what); 3406 } 3407 if (!SvOK(sv)) 3408 Perl_die(aTHX_ PL_no_usym, what); 3409 return gv_fetchsv_nomg(sv, GV_ADD, type); 3410 } 3411 3412 3413 /* Handle one or more aggregate derefs and array/hash indexings, e.g. 3414 * $h->{foo} or $a[0]{$key}[$i] or f()->[1] 3415 * 3416 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET. 3417 * Each of these either contains a set of actions, or an argument, such as 3418 * an IV to use as an array index, or a lexical var to retrieve. 3419 * Several actions re stored per UV; we keep shifting new actions off the 3420 * one UV, and only reload when it becomes zero. 3421 */ 3422 3423 PP(pp_multideref) 3424 { 3425 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */ 3426 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux; 3427 UV actions = items->uv; 3428 3429 assert(actions); 3430 /* this tells find_uninit_var() where we're up to */ 3431 PL_multideref_pc = items; 3432 3433 while (1) { 3434 /* there are three main classes of action; the first retrieve 3435 * the initial AV or HV from a variable or the stack; the second 3436 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem), 3437 * the third an unrolled (/DREFHV, rv2hv, helem). 3438 */ 3439 switch (actions & MDEREF_ACTION_MASK) { 3440 3441 case MDEREF_reload: 3442 actions = (++items)->uv; 3443 continue; 3444 3445 case MDEREF_AV_padav_aelem: /* $lex[...] */ 3446 sv = PAD_SVl((++items)->pad_offset); 3447 goto do_AV_aelem; 3448 3449 case MDEREF_AV_gvav_aelem: /* $pkg[...] */ 3450 sv = UNOP_AUX_item_sv(++items); 3451 assert(isGV_with_GP(sv)); 3452 sv = (SV*)GvAVn((GV*)sv); 3453 goto do_AV_aelem; 3454 3455 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */ 3456 { 3457 dSP; 3458 sv = POPs; 3459 PUTBACK; 3460 goto do_AV_rv2av_aelem; 3461 } 3462 3463 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] 
*/ 3464 sv = UNOP_AUX_item_sv(++items); 3465 assert(isGV_with_GP(sv)); 3466 sv = GvSVn((GV*)sv); 3467 goto do_AV_vivify_rv2av_aelem; 3468 3469 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */ 3470 sv = PAD_SVl((++items)->pad_offset); 3471 /* FALLTHROUGH */ 3472 3473 do_AV_vivify_rv2av_aelem: 3474 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */ 3475 /* this is the OPpDEREF action normally found at the end of 3476 * ops like aelem, helem, rv2sv */ 3477 sv = vivify_ref(sv, OPpDEREF_AV); 3478 /* FALLTHROUGH */ 3479 3480 do_AV_rv2av_aelem: 3481 /* this is basically a copy of pp_rv2av when it just has the 3482 * sKR/1 flags */ 3483 SvGETMAGIC(sv); 3484 if (LIKELY(SvROK(sv))) { 3485 if (UNLIKELY(SvAMAGIC(sv))) { 3486 sv = amagic_deref_call(sv, to_av_amg); 3487 } 3488 sv = SvRV(sv); 3489 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV)) 3490 DIE(aTHX_ "Not an ARRAY reference"); 3491 } 3492 else if (SvTYPE(sv) != SVt_PVAV) { 3493 if (!isGV_with_GP(sv)) 3494 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV); 3495 sv = MUTABLE_SV(GvAVn((GV*)sv)); 3496 } 3497 /* FALLTHROUGH */ 3498 3499 do_AV_aelem: 3500 { 3501 /* retrieve the key; this may be either a lexical or package 3502 * var (whose index/ptr is stored as an item) or a signed 3503 * integer constant stored as an item. 3504 */ 3505 SV *elemsv; 3506 IV elem = 0; /* to shut up stupid compiler warnings */ 3507 3508 3509 assert(SvTYPE(sv) == SVt_PVAV); 3510 3511 switch (actions & MDEREF_INDEX_MASK) { 3512 case MDEREF_INDEX_none: 3513 goto finish; 3514 case MDEREF_INDEX_const: 3515 elem = (++items)->iv; 3516 break; 3517 case MDEREF_INDEX_padsv: 3518 elemsv = PAD_SVl((++items)->pad_offset); 3519 goto check_elem; 3520 case MDEREF_INDEX_gvsv: 3521 elemsv = UNOP_AUX_item_sv(++items); 3522 assert(isGV_with_GP(elemsv)); 3523 elemsv = GvSVn((GV*)elemsv); 3524 check_elem: 3525 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) 3526 && ckWARN(WARN_MISC))) 3527 Perl_warner(aTHX_ packWARN(WARN_MISC), 3528 "Use of reference \"%" SVf "\" as array index", 3529 SVfARG(elemsv)); 3530 /* the only time that S_find_uninit_var() needs this 3531 * is to determine which index value triggered the 3532 * undef warning. So just update it here. Note that 3533 * since we don't save and restore this var (e.g. for 3534 * tie or overload execution), its value will be 3535 * meaningless apart from just here */ 3536 PL_multideref_pc = items; 3537 elem = SvIV(elemsv); 3538 break; 3539 } 3540 3541 3542 /* this is basically a copy of pp_aelem with OPpDEREF skipped */ 3543 3544 if (!(actions & MDEREF_FLAG_last)) { 3545 SV** svp = av_fetch((AV*)sv, elem, 1); 3546 if (!svp || ! (sv=*svp)) 3547 DIE(aTHX_ PL_no_aelem, elem); 3548 break; 3549 } 3550 3551 if (PL_op->op_private & 3552 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE)) 3553 { 3554 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) { 3555 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no; 3556 } 3557 else { 3558 I32 discard = (GIMME_V == G_VOID) ? 
G_DISCARD : 0; 3559 sv = av_delete((AV*)sv, elem, discard); 3560 if (discard) 3561 return NORMAL; 3562 if (!sv) 3563 sv = &PL_sv_undef; 3564 } 3565 } 3566 else { 3567 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 3568 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 3569 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 3570 bool preeminent = TRUE; 3571 AV *const av = (AV*)sv; 3572 SV** svp; 3573 3574 if (UNLIKELY(localizing)) { 3575 MAGIC *mg; 3576 HV *stash; 3577 3578 /* If we can determine whether the element exist, 3579 * Try to preserve the existenceness of a tied array 3580 * element by using EXISTS and DELETE if possible. 3581 * Fallback to FETCH and STORE otherwise. */ 3582 if (SvCANEXISTDELETE(av)) 3583 preeminent = av_exists(av, elem); 3584 } 3585 3586 svp = av_fetch(av, elem, lval && !defer); 3587 3588 if (lval) { 3589 if (!svp || !(sv = *svp)) { 3590 IV len; 3591 if (!defer) 3592 DIE(aTHX_ PL_no_aelem, elem); 3593 len = av_tindex(av); 3594 /* Resolve a negative index that falls within 3595 * the array. Leave it negative it if falls 3596 * outside the array. */ 3597 if (elem < 0 && len + elem >= 0) 3598 elem = len + elem; 3599 if (elem >= 0 && elem <= len) 3600 /* Falls within the array. */ 3601 sv = av_nonelem(av,elem); 3602 else 3603 /* Falls outside the array. If it is neg- 3604 ative, magic_setdefelem will use the 3605 index for error reporting. */ 3606 sv = sv_2mortal(newSVavdefelem(av,elem,1)); 3607 } 3608 else { 3609 if (UNLIKELY(localizing)) { 3610 if (preeminent) { 3611 save_aelem(av, elem, svp); 3612 sv = *svp; /* may have changed */ 3613 } 3614 else 3615 SAVEADELETE(av, elem); 3616 } 3617 } 3618 } 3619 else { 3620 sv = (svp ? *svp : &PL_sv_undef); 3621 /* see note in pp_helem() */ 3622 if (SvRMAGICAL(av) && SvGMAGICAL(sv)) 3623 mg_get(sv); 3624 } 3625 } 3626 3627 } 3628 finish: 3629 { 3630 dSP; 3631 XPUSHs(sv); 3632 RETURN; 3633 } 3634 /* NOTREACHED */ 3635 3636 3637 3638 3639 case MDEREF_HV_padhv_helem: /* $lex{...} */ 3640 sv = PAD_SVl((++items)->pad_offset); 3641 goto do_HV_helem; 3642 3643 case MDEREF_HV_gvhv_helem: /* $pkg{...} */ 3644 sv = UNOP_AUX_item_sv(++items); 3645 assert(isGV_with_GP(sv)); 3646 sv = (SV*)GvHVn((GV*)sv); 3647 goto do_HV_helem; 3648 3649 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */ 3650 { 3651 dSP; 3652 sv = POPs; 3653 PUTBACK; 3654 goto do_HV_rv2hv_helem; 3655 } 3656 3657 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */ 3658 sv = UNOP_AUX_item_sv(++items); 3659 assert(isGV_with_GP(sv)); 3660 sv = GvSVn((GV*)sv); 3661 goto do_HV_vivify_rv2hv_helem; 3662 3663 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */ 3664 sv = PAD_SVl((++items)->pad_offset); 3665 /* FALLTHROUGH */ 3666 3667 do_HV_vivify_rv2hv_helem: 3668 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */ 3669 /* this is the OPpDEREF action normally found at the end of 3670 * ops like aelem, helem, rv2sv */ 3671 sv = vivify_ref(sv, OPpDEREF_HV); 3672 /* FALLTHROUGH */ 3673 3674 do_HV_rv2hv_helem: 3675 /* this is basically a copy of pp_rv2hv when it just has the 3676 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */ 3677 3678 SvGETMAGIC(sv); 3679 if (LIKELY(SvROK(sv))) { 3680 if (UNLIKELY(SvAMAGIC(sv))) { 3681 sv = amagic_deref_call(sv, to_hv_amg); 3682 } 3683 sv = SvRV(sv); 3684 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV)) 3685 DIE(aTHX_ "Not a HASH reference"); 3686 } 3687 else if (SvTYPE(sv) != SVt_PVHV) { 3688 if (!isGV_with_GP(sv)) 3689 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV); 3690 sv = MUTABLE_SV(GvHVn((GV*)sv)); 
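/* symbolic ref or bare glob: use (and vivify if needed) the GV's HV slot */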
3691 } 3692 /* FALLTHROUGH */ 3693 3694 do_HV_helem: 3695 { 3696 /* retrieve the key; this may be either a lexical / package 3697 * var or a string constant, whose index/ptr is stored as an 3698 * item 3699 */ 3700 SV *keysv = NULL; /* to shut up stupid compiler warnings */ 3701 3702 assert(SvTYPE(sv) == SVt_PVHV); 3703 3704 switch (actions & MDEREF_INDEX_MASK) { 3705 case MDEREF_INDEX_none: 3706 goto finish; 3707 3708 case MDEREF_INDEX_const: 3709 keysv = UNOP_AUX_item_sv(++items); 3710 break; 3711 3712 case MDEREF_INDEX_padsv: 3713 keysv = PAD_SVl((++items)->pad_offset); 3714 break; 3715 3716 case MDEREF_INDEX_gvsv: 3717 keysv = UNOP_AUX_item_sv(++items); 3718 keysv = GvSVn((GV*)keysv); 3719 break; 3720 } 3721 3722 /* see comment above about setting this var */ 3723 PL_multideref_pc = items; 3724 3725 3726 /* ensure that candidate CONSTs have been HEKified */ 3727 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const) 3728 || SvTYPE(keysv) >= SVt_PVMG 3729 || !SvOK(keysv) 3730 || SvROK(keysv) 3731 || SvIsCOW_shared_hash(keysv)); 3732 3733 /* this is basically a copy of pp_helem with OPpDEREF skipped */ 3734 3735 if (!(actions & MDEREF_FLAG_last)) { 3736 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0); 3737 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef) 3738 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv)); 3739 break; 3740 } 3741 3742 if (PL_op->op_private & 3743 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE)) 3744 { 3745 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) { 3746 sv = hv_exists_ent((HV*)sv, keysv, 0) 3747 ? &PL_sv_yes : &PL_sv_no; 3748 } 3749 else { 3750 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0; 3751 sv = hv_delete_ent((HV*)sv, keysv, discard, 0); 3752 if (discard) 3753 return NORMAL; 3754 if (!sv) 3755 sv = &PL_sv_undef; 3756 } 3757 } 3758 else { 3759 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 3760 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 3761 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 3762 bool preeminent = TRUE; 3763 SV **svp; 3764 HV * const hv = (HV*)sv; 3765 HE* he; 3766 3767 if (UNLIKELY(localizing)) { 3768 MAGIC *mg; 3769 HV *stash; 3770 3771 /* If we can determine whether the element exist, 3772 * Try to preserve the existenceness of a tied hash 3773 * element by using EXISTS and DELETE if possible. 3774 * Fallback to FETCH and STORE otherwise. */ 3775 if (SvCANEXISTDELETE(hv)) 3776 preeminent = hv_exists_ent(hv, keysv, 0); 3777 } 3778 3779 he = hv_fetch_ent(hv, keysv, lval && !defer, 0); 3780 svp = he ? &HeVAL(he) : NULL; 3781 3782 3783 if (lval) { 3784 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) { 3785 SV* lv; 3786 SV* key2; 3787 if (!defer) 3788 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv)); 3789 lv = sv_newmortal(); 3790 sv_upgrade(lv, SVt_PVLV); 3791 LvTYPE(lv) = 'y'; 3792 sv_magic(lv, key2 = newSVsv(keysv), 3793 PERL_MAGIC_defelem, NULL, 0); 3794 /* sv_magic() increments refcount */ 3795 SvREFCNT_dec_NN(key2); 3796 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv); 3797 LvTARGLEN(lv) = 1; 3798 sv = lv; 3799 } 3800 else { 3801 if (localizing) { 3802 if (HvNAME_get(hv) && isGV_or_RVCV(sv)) 3803 save_gp(MUTABLE_GV(sv), 3804 !(PL_op->op_flags & OPf_SPECIAL)); 3805 else if (preeminent) { 3806 save_helem_flags(hv, keysv, svp, 3807 (PL_op->op_flags & OPf_SPECIAL) 3808 ? 0 : SAVEf_SETMAGIC); 3809 sv = *svp; /* may have changed */ 3810 } 3811 else 3812 SAVEHDELETE(hv, keysv); 3813 } 3814 } 3815 } 3816 else { 3817 sv = (svp && *svp ? 
*svp : &PL_sv_undef); 3818 /* see note in pp_helem() */ 3819 if (SvRMAGICAL(hv) && SvGMAGICAL(sv)) 3820 mg_get(sv); 3821 } 3822 } 3823 goto finish; 3824 } 3825 3826 } /* switch */ 3827 3828 actions >>= MDEREF_SHIFT; 3829 } /* while */ 3830 /* NOTREACHED */ 3831 } 3832 3833 3834 PP(pp_iter) 3835 { 3836 PERL_CONTEXT *cx; 3837 SV *oldsv; 3838 SV **itersvp; 3839 3840 SV *sv; 3841 AV *av; 3842 IV ix; 3843 IV inc; 3844 3845 cx = CX_CUR(); 3846 itersvp = CxITERVAR(cx); 3847 assert(itersvp); 3848 3849 switch (CxTYPE(cx)) { 3850 3851 case CXt_LOOP_LAZYSV: /* string increment */ 3852 { 3853 SV* cur = cx->blk_loop.state_u.lazysv.cur; 3854 SV *end = cx->blk_loop.state_u.lazysv.end; 3855 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no. 3856 It has SvPVX of "" and SvCUR of 0, which is what we want. */ 3857 STRLEN maxlen = 0; 3858 const char *max = SvPV_const(end, maxlen); 3859 if (DO_UTF8(end) && IN_UNI_8_BIT) 3860 maxlen = sv_len_utf8_nomg(end); 3861 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen)) 3862 goto retno; 3863 3864 oldsv = *itersvp; 3865 /* NB: on the first iteration, oldsv will have a ref count of at 3866 * least 2 (one extra from blk_loop.itersave), so the GV or pad 3867 * slot will get localised; on subsequent iterations the RC==1 3868 * optimisation may kick in and the SV will be reused. */ 3869 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) { 3870 /* safe to reuse old SV */ 3871 sv_setsv(oldsv, cur); 3872 } 3873 else 3874 { 3875 /* we need a fresh SV every time so that loop body sees a 3876 * completely new SV for closures/references to work as 3877 * they used to */ 3878 *itersvp = newSVsv(cur); 3879 SvREFCNT_dec(oldsv); 3880 } 3881 if (strEQ(SvPVX_const(cur), max)) 3882 sv_setiv(cur, 0); /* terminate next time */ 3883 else 3884 sv_inc(cur); 3885 break; 3886 } 3887 3888 case CXt_LOOP_LAZYIV: /* integer increment */ 3889 { 3890 IV cur = cx->blk_loop.state_u.lazyiv.cur; 3891 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end)) 3892 goto retno; 3893 3894 oldsv = *itersvp; 3895 /* see NB comment above */ 3896 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) { 3897 /* safe to reuse old SV */ 3898 3899 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV)) 3900 == SVt_IV) 3901 { 3902 /* Cheap SvIOK_only(). 3903 * Assert that flags which SvIOK_only() would test or 3904 * clear can't be set, because we're SVt_IV */ 3905 assert(!(SvFLAGS(oldsv) & 3906 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK))))); 3907 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK); 3908 /* SvIV_set() where sv_any points to head */ 3909 oldsv->sv_u.svu_iv = cur; 3910 3911 } 3912 else 3913 sv_setiv(oldsv, cur); 3914 } 3915 else 3916 { 3917 /* we need a fresh SV every time so that loop body sees a 3918 * completely new SV for closures/references to work as they 3919 * used to */ 3920 *itersvp = newSViv(cur); 3921 SvREFCNT_dec(oldsv); 3922 } 3923 3924 if (UNLIKELY(cur == IV_MAX)) { 3925 /* Handle end of range at IV_MAX */ 3926 cx->blk_loop.state_u.lazyiv.end = IV_MIN; 3927 } else 3928 ++cx->blk_loop.state_u.lazyiv.cur; 3929 break; 3930 } 3931 3932 case CXt_LOOP_LIST: /* for (1,2,3) */ 3933 3934 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */ 3935 inc = (IV)1 - (IV)(PL_op->op_private & OPpITER_REVERSED); 3936 ix = (cx->blk_loop.state_u.stack.ix += inc); 3937 if (UNLIKELY(inc > 0 3938 ? 
ix > cx->blk_oldsp 3939 : ix <= cx->blk_loop.state_u.stack.basesp) 3940 ) 3941 goto retno; 3942 3943 sv = PL_stack_base[ix]; 3944 av = NULL; 3945 goto loop_ary_common; 3946 3947 case CXt_LOOP_ARY: /* for (@ary) */ 3948 3949 av = cx->blk_loop.state_u.ary.ary; 3950 inc = (IV)1 - (IV)(PL_op->op_private & OPpITER_REVERSED); 3951 ix = (cx->blk_loop.state_u.ary.ix += inc); 3952 if (UNLIKELY(inc > 0 3953 ? ix > AvFILL(av) 3954 : ix < 0) 3955 ) 3956 goto retno; 3957 3958 if (UNLIKELY(SvRMAGICAL(av))) { 3959 SV * const * const svp = av_fetch(av, ix, FALSE); 3960 sv = svp ? *svp : NULL; 3961 } 3962 else { 3963 sv = AvARRAY(av)[ix]; 3964 } 3965 3966 loop_ary_common: 3967 3968 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) { 3969 SvSetMagicSV(*itersvp, sv); 3970 break; 3971 } 3972 3973 if (LIKELY(sv)) { 3974 if (UNLIKELY(SvIS_FREED(sv))) { 3975 *itersvp = NULL; 3976 Perl_croak(aTHX_ "Use of freed value in iteration"); 3977 } 3978 if (SvPADTMP(sv)) { 3979 sv = newSVsv(sv); 3980 } 3981 else { 3982 SvTEMP_off(sv); 3983 SvREFCNT_inc_simple_void_NN(sv); 3984 } 3985 } 3986 else if (av) { 3987 sv = newSVavdefelem(av, ix, 0); 3988 } 3989 else 3990 sv = &PL_sv_undef; 3991 3992 oldsv = *itersvp; 3993 *itersvp = sv; 3994 SvREFCNT_dec(oldsv); 3995 break; 3996 3997 default: 3998 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx)); 3999 } 4000 4001 /* Try to bypass pushing &PL_sv_yes and calling pp_and(); instead 4002 * jump straight to the AND op's op_other */ 4003 assert(PL_op->op_next->op_type == OP_AND); 4004 if (PL_op->op_next->op_ppaddr == Perl_pp_and) { 4005 return cLOGOPx(PL_op->op_next)->op_other; 4006 } 4007 else { 4008 /* An XS module has replaced the op_ppaddr, so fall back to the slow, 4009 * obvious way. */ 4010 /* pp_enteriter should have pre-extended the stack */ 4011 EXTEND_SKIP(PL_stack_sp, 1); 4012 *++PL_stack_sp = &PL_sv_yes; 4013 return PL_op->op_next; 4014 } 4015 4016 retno: 4017 /* Try to bypass pushing &PL_sv_no and calling pp_and(); instead 4018 * jump straight to the AND op's op_next */ 4019 assert(PL_op->op_next->op_type == OP_AND); 4020 /* pp_enteriter should have pre-extended the stack */ 4021 EXTEND_SKIP(PL_stack_sp, 1); 4022 /* we only need this for the rare case where the OP_AND isn't 4023 * in void context, e.g. $x = do { for (..) {...} }; 4024 * (or for when an XS module has replaced the op_ppaddr) 4025 * but it's cheaper to just push it rather than testing first 4026 */ 4027 *++PL_stack_sp = &PL_sv_no; 4028 if (PL_op->op_next->op_ppaddr == Perl_pp_and) { 4029 return PL_op->op_next->op_next; 4030 } 4031 else { 4032 /* An XS module has replaced the op_ppaddr, so fall back to the slow, 4033 * obvious way. */ 4034 return PL_op->op_next; 4035 } 4036 } 4037 4038 4039 /* 4040 A description of how taint works in pattern matching and substitution. 4041 4042 This is all conditional on NO_TAINT_SUPPORT not being defined. Under 4043 NO_TAINT_SUPPORT, taint-related operations should become no-ops. 4044 4045 While the pattern is being assembled/concatenated and then compiled, 4046 PL_tainted will get set (via TAINT_set) if any component of the pattern 4047 is tainted, e.g. /.*$tainted/. At the end of pattern compilation, 4048 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via 4049 TAINT_get). It will also be set if any component of the pattern matches 4050 based on locale-dependent behavior. 4051 4052 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to 4053 the pattern is marked as tainted. 
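(For instance, my $r = qr/a$tainted/ leaves $r itself tainted, not just
the compiled pattern it refers to.)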
This means that subsequent usage, such 4054 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED, 4055 on the new pattern too. 4056 4057 RXf_TAINTED_SEEN is used post-execution by the get magic code 4058 of $1 et al to indicate whether the returned value should be tainted. 4059 It is the responsibility of the caller of the pattern (i.e. pp_match, 4060 pp_subst etc) to set this flag for any other circumstances where $1 needs 4061 to be tainted. 4062 4063 The taint behaviour of pp_subst (and pp_substcont) is quite complex. 4064 4065 There are three possible sources of taint 4066 * the source string 4067 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN) 4068 * the replacement string (or expression under /e) 4069 4070 There are four destinations of taint and they are affected by the sources 4071 according to the rules below: 4072 4073 * the return value (not including /r): 4074 tainted by the source string and pattern, but only for the 4075 number-of-iterations case; boolean returns aren't tainted; 4076 * the modified string (or modified copy under /r): 4077 tainted by the source string, pattern, and replacement strings; 4078 * $1 et al: 4079 tainted by the pattern, and under 'use re "taint"', by the source 4080 string too; 4081 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted: 4082 should always be unset before executing subsequent code. 4083 4084 The overall action of pp_subst is: 4085 4086 * at the start, set bits in rxtainted indicating the taint status of 4087 the various sources. 4088 4089 * After each pattern execution, update the SUBST_TAINT_PAT bit in 4090 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the 4091 pattern has subsequently become tainted via locale ops. 4092 4093 * If control is being passed to pp_substcont to execute a /e block, 4094 save rxtainted in the CXt_SUBST block, for future use by 4095 pp_substcont. 4096 4097 * Whenever control is being returned to perl code (either by falling 4098 off the "end" of pp_subst/pp_substcont, or by entering a /e block), 4099 use the flag bits in rxtainted to make all the appropriate types of 4100 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1 4101 et al will appear tainted. 4102 4103 pp_match is just a simpler version of the above. 4104 4105 */ 4106 4107 PP(pp_subst) 4108 { 4109 dSP; dTARG; 4110 PMOP *pm = cPMOP; 4111 PMOP *rpm = pm; 4112 char *s; 4113 char *strend; 4114 const char *c; 4115 STRLEN clen; 4116 SSize_t iters = 0; 4117 SSize_t maxiters; 4118 bool once; 4119 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits. 4120 See "how taint works" above */ 4121 char *orig; 4122 U8 r_flags; 4123 REGEXP *rx = PM_GETRE(pm); 4124 regexp *prog = ReANY(rx); 4125 STRLEN len; 4126 int force_on_match = 0; 4127 const I32 oldsave = PL_savestack_ix; 4128 STRLEN slen; 4129 bool doutf8 = FALSE; /* whether replacement is in utf8 */ 4130 #ifdef PERL_ANY_COW 4131 bool was_cow; 4132 #endif 4133 SV *nsv = NULL; 4134 /* known replacement string? */ 4135 SV *dstr = (pm->op_pmflags & PMf_CONST) ? 
POPs : NULL; 4136 4137 PERL_ASYNC_CHECK(); 4138 4139 if (PL_op->op_flags & OPf_STACKED) 4140 TARG = POPs; 4141 else { 4142 if (ARGTARG) 4143 GETTARGET; 4144 else { 4145 TARG = DEFSV; 4146 } 4147 EXTEND(SP,1); 4148 } 4149 4150 SvGETMAGIC(TARG); /* must come before cow check */ 4151 #ifdef PERL_ANY_COW 4152 /* note that a string might get converted to COW during matching */ 4153 was_cow = cBOOL(SvIsCOW(TARG)); 4154 #endif 4155 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) { 4156 #ifndef PERL_ANY_COW 4157 if (SvIsCOW(TARG)) 4158 sv_force_normal_flags(TARG,0); 4159 #endif 4160 if ((SvREADONLY(TARG) 4161 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG)) 4162 || SvTYPE(TARG) > SVt_PVLV) 4163 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG))))) 4164 Perl_croak_no_modify(); 4165 } 4166 PUTBACK; 4167 4168 orig = SvPV_nomg(TARG, len); 4169 /* note we don't (yet) force the var into being a string; if we fail 4170 * to match, we leave as-is; on successful match however, we *will* 4171 * coerce into a string, then repeat the match */ 4172 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG)) 4173 force_on_match = 1; 4174 4175 /* only replace once? */ 4176 once = !(rpm->op_pmflags & PMf_GLOBAL); 4177 4178 /* See "how taint works" above */ 4179 if (TAINTING_get) { 4180 rxtainted = ( 4181 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0) 4182 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0) 4183 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0) 4184 | (( (once && !(rpm->op_pmflags & PMf_NONDESTRUCT)) 4185 || (PL_op->op_private & OPpTRUEBOOL)) ? SUBST_TAINT_BOOLRET : 0)); 4186 TAINT_NOT; 4187 } 4188 4189 force_it: 4190 if (!pm || !orig) 4191 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig); 4192 4193 strend = orig + len; 4194 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len; 4195 maxiters = 2 * slen + 10; /* We can match twice at each 4196 position, once with zero-length, 4197 second time with non-zero. */ 4198 4199 /* handle the empty pattern */ 4200 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) { 4201 if (PL_curpm == PL_reg_curpm) { 4202 if (PL_curpm_under) { 4203 if (PL_curpm_under == PL_reg_curpm) { 4204 Perl_croak(aTHX_ "Infinite recursion via empty pattern"); 4205 } else { 4206 pm = PL_curpm_under; 4207 } 4208 } 4209 } else { 4210 pm = PL_curpm; 4211 } 4212 rx = PM_GETRE(pm); 4213 prog = ReANY(rx); 4214 } 4215 4216 #ifdef PERL_SAWAMPERSAND 4217 r_flags = ( RXp_NPARENS(prog) 4218 || PL_sawampersand 4219 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY)) 4220 || (rpm->op_pmflags & PMf_KEEPCOPY) 4221 ) 4222 ? REXEC_COPY_STR 4223 : 0; 4224 #else 4225 r_flags = REXEC_COPY_STR; 4226 #endif 4227 4228 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags)) 4229 { 4230 SPAGAIN; 4231 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no); 4232 LEAVE_SCOPE(oldsave); 4233 RETURN; 4234 } 4235 PL_curpm = pm; 4236 4237 /* known replacement string? */ 4238 if (dstr) { 4239 /* replacement needing upgrading? */ 4240 if (DO_UTF8(TARG) && !doutf8) { 4241 nsv = sv_newmortal(); 4242 SvSetSV(nsv, dstr); 4243 sv_utf8_upgrade(nsv); 4244 c = SvPV_const(nsv, clen); 4245 doutf8 = TRUE; 4246 } 4247 else { 4248 c = SvPV_const(dstr, clen); 4249 doutf8 = DO_UTF8(dstr); 4250 } 4251 4252 if (UNLIKELY(TAINT_get)) 4253 rxtainted |= SUBST_TAINT_REPL; 4254 } 4255 else { 4256 c = NULL; 4257 doutf8 = FALSE; 4258 } 4259 4260 /* can do inplace substitution? 
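Only if: the replacement is a known constant string (c != NULL); the target isn't copy-on-write; the replacement is no longer than the shortest text the pattern can match, so the buffer never needs to grow; we either substitute just once, need no pre-match copy of the string, or the replacement has no get-magic and the pattern has no (?{}) code blocks; the pattern doesn't set RXf_NO_INPLACE_SUBST; the replacement isn't UTF-8 unless the target already is; and this isn't s///r.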
*/ 4261 if (c 4262 #ifdef PERL_ANY_COW 4263 && !was_cow 4264 #endif 4265 && (I32)clen <= RXp_MINLENRET(prog) 4266 && ( once 4267 || !(r_flags & REXEC_COPY_STR) 4268 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN)) 4269 ) 4270 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST) 4271 && (!doutf8 || SvUTF8(TARG)) 4272 && !(rpm->op_pmflags & PMf_NONDESTRUCT)) 4273 { 4274 4275 #ifdef PERL_ANY_COW 4276 /* string might have got converted to COW since we set was_cow */ 4277 if (SvIsCOW(TARG)) { 4278 if (!force_on_match) 4279 goto have_a_cow; 4280 assert(SvVOK(TARG)); 4281 } 4282 #endif 4283 if (force_on_match) { 4284 /* redo the first match, this time with the orig var 4285 * forced into being a string */ 4286 force_on_match = 0; 4287 orig = SvPV_force_nomg(TARG, len); 4288 goto force_it; 4289 } 4290 4291 if (once) { 4292 char *d, *m; 4293 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */ 4294 rxtainted |= SUBST_TAINT_PAT; 4295 m = orig + RXp_OFFS(prog)[0].start; 4296 d = orig + RXp_OFFS(prog)[0].end; 4297 s = orig; 4298 if (m - s > strend - d) { /* faster to shorten from end */ 4299 I32 i; 4300 if (clen) { 4301 Copy(c, m, clen, char); 4302 m += clen; 4303 } 4304 i = strend - d; 4305 if (i > 0) { 4306 Move(d, m, i, char); 4307 m += i; 4308 } 4309 *m = '\0'; 4310 SvCUR_set(TARG, m - s); 4311 } 4312 else { /* faster from front */ 4313 I32 i = m - s; 4314 d -= clen; 4315 if (i > 0) 4316 Move(s, d - i, i, char); 4317 sv_chop(TARG, d-i); 4318 if (clen) 4319 Copy(c, d, clen, char); 4320 } 4321 SPAGAIN; 4322 PUSHs(&PL_sv_yes); 4323 } 4324 else { 4325 char *d, *m; 4326 d = s = RXp_OFFS(prog)[0].start + orig; 4327 do { 4328 I32 i; 4329 if (UNLIKELY(iters++ > maxiters)) 4330 DIE(aTHX_ "Substitution loop"); 4331 /* run time pattern taint, eg locale */ 4332 if (UNLIKELY(RXp_MATCH_TAINTED(prog))) 4333 rxtainted |= SUBST_TAINT_PAT; 4334 m = RXp_OFFS(prog)[0].start + orig; 4335 if ((i = m - s)) { 4336 if (s != d) 4337 Move(s, d, i, char); 4338 d += i; 4339 } 4340 if (clen) { 4341 Copy(c, d, clen, char); 4342 d += clen; 4343 } 4344 s = RXp_OFFS(prog)[0].end + orig; 4345 } while (CALLREGEXEC(rx, s, strend, orig, 4346 s == m, /* don't match same null twice */ 4347 TARG, NULL, 4348 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW)); 4349 if (s != d) { 4350 I32 i = strend - s; 4351 SvCUR_set(TARG, d - SvPVX_const(TARG) + i); 4352 Move(s, d, i+1, char); /* include the NUL */ 4353 } 4354 SPAGAIN; 4355 assert(iters); 4356 if (PL_op->op_private & OPpTRUEBOOL) 4357 PUSHs(&PL_sv_yes); 4358 else 4359 mPUSHi(iters); 4360 } 4361 } 4362 else { 4363 bool first; 4364 char *m; 4365 SV *repl; 4366 if (force_on_match) { 4367 /* redo the first match, this time with the orig var 4368 * forced into being a string */ 4369 force_on_match = 0; 4370 if (rpm->op_pmflags & PMf_NONDESTRUCT) { 4371 /* I feel that it should be possible to avoid this mortal copy 4372 given that the code below copies into a new destination. 4373 However, I suspect it isn't worth the complexity of 4374 unravelling the C<goto force_it> for the small number of 4375 cases where it would be viable to drop into the copy code. */ 4376 TARG = sv_2mortal(newSVsv(TARG)); 4377 } 4378 orig = SvPV_force_nomg(TARG, len); 4379 goto force_it; 4380 } 4381 #ifdef PERL_ANY_COW 4382 have_a_cow: 4383 #endif 4384 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */ 4385 rxtainted |= SUBST_TAINT_PAT; 4386 repl = dstr; 4387 s = RXp_OFFS(prog)[0].start + orig; 4388 dstr = newSVpvn_flags(orig, s-orig, 4389 SVs_TEMP | (DO_UTF8(TARG) ? 
SVf_UTF8 : 0)); 4390 if (!c) { 4391 PERL_CONTEXT *cx; 4392 SPAGAIN; 4393 m = orig; 4394 /* note that a whole bunch of local vars are saved here for 4395 * use by pp_substcont: here's a list of them in case you're 4396 * searching for places in this sub that uses a particular var: 4397 * iters maxiters r_flags oldsave rxtainted orig dstr targ 4398 * s m strend rx once */ 4399 CX_PUSHSUBST(cx); 4400 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot); 4401 } 4402 first = TRUE; 4403 do { 4404 if (UNLIKELY(iters++ > maxiters)) 4405 DIE(aTHX_ "Substitution loop"); 4406 if (UNLIKELY(RXp_MATCH_TAINTED(prog))) 4407 rxtainted |= SUBST_TAINT_PAT; 4408 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) { 4409 char *old_s = s; 4410 char *old_orig = orig; 4411 assert(RXp_SUBOFFSET(prog) == 0); 4412 4413 orig = RXp_SUBBEG(prog); 4414 s = orig + (old_s - old_orig); 4415 strend = s + (strend - old_s); 4416 } 4417 m = RXp_OFFS(prog)[0].start + orig; 4418 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG)); 4419 s = RXp_OFFS(prog)[0].end + orig; 4420 if (first) { 4421 /* replacement already stringified */ 4422 if (clen) 4423 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8); 4424 first = FALSE; 4425 } 4426 else { 4427 sv_catsv(dstr, repl); 4428 } 4429 if (once) 4430 break; 4431 } while (CALLREGEXEC(rx, s, strend, orig, 4432 s == m, /* Yields minend of 0 or 1 */ 4433 TARG, NULL, 4434 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW)); 4435 assert(strend >= s); 4436 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG)); 4437 4438 if (rpm->op_pmflags & PMf_NONDESTRUCT) { 4439 /* From here on down we're using the copy, and leaving the original 4440 untouched. */ 4441 TARG = dstr; 4442 SPAGAIN; 4443 PUSHs(dstr); 4444 } else { 4445 #ifdef PERL_ANY_COW 4446 /* The match may make the string COW. If so, brilliant, because 4447 that's just saved us one malloc, copy and free - the regexp has 4448 donated the old buffer, and we malloc an entirely new one, rather 4449 than the regexp malloc()ing a buffer and copying our original, 4450 only for us to throw it away here during the substitution. 
*/ 4451 if (SvIsCOW(TARG)) { 4452 sv_force_normal_flags(TARG, SV_COW_DROP_PV); 4453 } else 4454 #endif 4455 { 4456 SvPV_free(TARG); 4457 } 4458 SvPV_set(TARG, SvPVX(dstr)); 4459 SvCUR_set(TARG, SvCUR(dstr)); 4460 SvLEN_set(TARG, SvLEN(dstr)); 4461 SvFLAGS(TARG) |= SvUTF8(dstr); 4462 SvPV_set(dstr, NULL); 4463 4464 SPAGAIN; 4465 if (PL_op->op_private & OPpTRUEBOOL) 4466 PUSHs(&PL_sv_yes); 4467 else 4468 mPUSHi(iters); 4469 } 4470 } 4471 4472 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) { 4473 (void)SvPOK_only_UTF8(TARG); 4474 } 4475 4476 /* See "how taint works" above */ 4477 if (TAINTING_get) { 4478 if ((rxtainted & SUBST_TAINT_PAT) || 4479 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) == 4480 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) 4481 ) 4482 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */ 4483 4484 if (!(rxtainted & SUBST_TAINT_BOOLRET) 4485 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT)) 4486 ) 4487 SvTAINTED_on(TOPs); /* taint return value */ 4488 else 4489 SvTAINTED_off(TOPs); /* may have got tainted earlier */ 4490 4491 /* needed for mg_set below */ 4492 TAINT_set( 4493 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL)) 4494 ); 4495 SvTAINT(TARG); 4496 } 4497 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */ 4498 TAINT_NOT; 4499 LEAVE_SCOPE(oldsave); 4500 RETURN; 4501 } 4502 4503 PP(pp_grepwhile) 4504 { 4505 dSP; 4506 dPOPss; 4507 4508 if (SvTRUE_NN(sv)) 4509 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr]; 4510 ++*PL_markstack_ptr; 4511 FREETMPS; 4512 LEAVE_with_name("grep_item"); /* exit inner scope */ 4513 4514 /* All done yet? */ 4515 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) { 4516 I32 items; 4517 const U8 gimme = GIMME_V; 4518 4519 LEAVE_with_name("grep"); /* exit outer scope */ 4520 (void)POPMARK; /* pop src */ 4521 items = --*PL_markstack_ptr - PL_markstack_ptr[-1]; 4522 (void)POPMARK; /* pop dst */ 4523 SP = PL_stack_base + POPMARK; /* pop original mark */ 4524 if (gimme == G_SCALAR) { 4525 if (PL_op->op_private & OPpTRUEBOOL) 4526 PUSHs(items ? &PL_sv_yes : &PL_sv_zero); 4527 else { 4528 dTARGET; 4529 PUSHi(items); 4530 } 4531 } 4532 else if (gimme == G_ARRAY) 4533 SP += items; 4534 RETURN; 4535 } 4536 else { 4537 SV *src; 4538 4539 ENTER_with_name("grep_item"); /* enter inner scope */ 4540 SAVEVPTR(PL_curpm); 4541 4542 src = PL_stack_base[TOPMARK]; 4543 if (SvPADTMP(src)) { 4544 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src); 4545 PL_tmps_floor++; 4546 } 4547 SvTEMP_off(src); 4548 DEFSV_set(src); 4549 4550 RETURNOP(cLOGOP->op_other); 4551 } 4552 } 4553 4554 /* leave_adjust_stacks(): 4555 * 4556 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp), 4557 * positioning them at to_sp+1 onwards, and do the equivalent of a 4558 * FREEMPS and TAINT_NOT. 4559 * 4560 * Not intended to be called in void context. 4561 * 4562 * When leaving a sub, eval, do{} or other scope, the things that need 4563 * doing to process the return args are: 4564 * * in scalar context, only return the last arg (or PL_sv_undef if none); 4565 * * for the types of return that return copies of their args (such 4566 * as rvalue sub return), make a mortal copy of every return arg, 4567 * except where we can optimise the copy away without it being 4568 * semantically visible; 4569 * * make sure that the arg isn't prematurely freed; in the case of an 4570 * arg not copied, this may involve mortalising it. 
For example, in 4571 * C<sub f { my $x = ...; $x }>, $x would be freed when we do 4572 * CX_LEAVE_SCOPE(cx) unless it's protected or copied. 4573 * 4574 * What condition to use when deciding whether to pass the arg through 4575 * or make a copy, is determined by the 'pass' arg; its valid values are: 4576 * 0: rvalue sub/eval exit 4577 * 1: other rvalue scope exit 4578 * 2: :lvalue sub exit in rvalue context 4579 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits 4580 * 4581 * There is a big issue with doing a FREETMPS. We would like to free any 4582 * temps created by the last statement which the sub executed, rather than 4583 * leaving them for the caller. In a situation where a sub call isn't 4584 * soon followed by a nextstate (e.g. nested recursive calls, a la 4585 * fibonacci()), temps can accumulate, causing memory and performance 4586 * issues. 4587 * 4588 * On the other hand, we don't want to free any TEMPs which are keeping 4589 * alive any return args that we skipped copying; nor do we wish to undo 4590 * any mortalising done here. 4591 * 4592 * The solution is to split the temps stack frame into two, with a cut 4593 * point delineating the two halves. We arrange that by the end of this 4594 * function, all the temps stack frame entries we wish to keep are in the 4595 * range PL_tmps_floor+1.. tmps_base-1, while the ones to free now are in 4596 * the range tmps_base .. PL_tmps_ix. During the course of this 4597 * function, tmps_base starts off as PL_tmps_floor+1, then increases 4598 * whenever we find or create a temp that we know should be kept. In 4599 * general the stuff above tmps_base is undecided until we reach the end, 4600 * and we may need a sort stage for that. 4601 * 4602 * To determine whether a TEMP is keeping a return arg alive, every 4603 * arg that is kept rather than copied and which has the SvTEMP flag 4604 * set, has the flag temporarily unset, to mark it. At the end we scan 4605 * the temps stack frame above the cut for entries without SvTEMP and 4606 * keep them, while turning SvTEMP on again. Note that if we die before 4607 * the SvTEMPs flags are set again, its safe: at worst, subsequent use of 4608 * those SVs may be slightly less efficient. 4609 * 4610 * In practice various optimisations for some common cases mean we can 4611 * avoid most of the scanning and swapping about with the temps stack. 4612 */ 4613 4614 void 4615 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass) 4616 { 4617 dVAR; 4618 dSP; 4619 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */ 4620 SSize_t nargs; 4621 4622 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS; 4623 4624 TAINT_NOT; 4625 4626 if (gimme == G_ARRAY) { 4627 nargs = SP - from_sp; 4628 from_sp++; 4629 } 4630 else { 4631 assert(gimme == G_SCALAR); 4632 if (UNLIKELY(from_sp >= SP)) { 4633 /* no return args */ 4634 assert(from_sp == SP); 4635 EXTEND(SP, 1); 4636 *++SP = &PL_sv_undef; 4637 to_sp = SP; 4638 nargs = 0; 4639 } 4640 else { 4641 from_sp = SP; 4642 nargs = 1; 4643 } 4644 } 4645 4646 /* common code for G_SCALAR and G_ARRAY */ 4647 4648 tmps_base = PL_tmps_floor + 1; 4649 4650 assert(nargs >= 0); 4651 if (nargs) { 4652 /* pointer version of tmps_base. Not safe across temp stack 4653 * reallocs. 
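* Whenever code below may have reallocated the temps stack (the full sv_setsv_flags() path), it is recomputed from the saved offset.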
*/ 4654 SV **tmps_basep; 4655 4656 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */ 4657 tmps_basep = PL_tmps_stack + tmps_base; 4658 4659 /* process each return arg */ 4660 4661 do { 4662 SV *sv = *from_sp++; 4663 4664 assert(PL_tmps_ix + nargs < PL_tmps_max); 4665 #ifdef DEBUGGING 4666 /* PADTMPs with container set magic shouldn't appear in the 4667 * wild. This assert is more important for pp_leavesublv(), 4668 * but by testing for it here, we're more likely to catch 4669 * bad cases (what with :lvalue subs not being widely 4670 * deployed). The two issues are that for something like 4671 * sub :lvalue { $tied{foo} } 4672 * or 4673 * sub :lvalue { substr($foo,1,2) } 4674 * pp_leavesublv() will croak if the sub returns a PADTMP, 4675 * and currently functions like pp_substr() return a mortal 4676 * rather than using their PADTMP when returning a PVLV. 4677 * This is because the PVLV will hold a ref to $foo, 4678 * so $foo would get delayed in being freed while 4679 * the PADTMP SV remained in the PAD. 4680 * So if this assert fails it means either: 4681 * 1) there is pp code similar to pp_substr that is 4682 * returning a PADTMP instead of a mortal, and probably 4683 * needs fixing, or 4684 * 2) pp_leavesublv is making unwarranted assumptions 4685 * about always croaking on a PADTMP 4686 */ 4687 if (SvPADTMP(sv) && SvSMAGICAL(sv)) { 4688 MAGIC *mg; 4689 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) { 4690 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type)); 4691 } 4692 } 4693 #endif 4694 4695 if ( 4696 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1) 4697 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1) 4698 : pass == 2 ? (!SvPADTMP(sv)) 4699 : 1) 4700 { 4701 /* pass through: skip copy for logic or optimisation 4702 * reasons; instead mortalise it, except that ... */ 4703 *++to_sp = sv; 4704 4705 if (SvTEMP(sv)) { 4706 /* ... since this SV is an SvTEMP , we don't need to 4707 * re-mortalise it; instead we just need to ensure 4708 * that its existing entry in the temps stack frame 4709 * ends up below the cut and so avoids being freed 4710 * this time round. We mark it as needing to be kept 4711 * by temporarily unsetting SvTEMP; then at the end, 4712 * we shuffle any !SvTEMP entries on the tmps stack 4713 * back below the cut. 4714 * However, there's a significant chance that there's 4715 * a 1:1 correspondence between the first few (or all) 4716 * elements in the return args stack frame and those 4717 * in the temps stack frame; e,g.: 4718 * sub f { ....; map {...} .... }, 4719 * or if we're exiting multiple scopes and one of the 4720 * inner scopes has already made mortal copies of each 4721 * return arg. 4722 * 4723 * If so, this arg sv will correspond to the next item 4724 * on the tmps stack above the cut, and so can be kept 4725 * merely by moving the cut boundary up one, rather 4726 * than messing with SvTEMP. If all args are 1:1 then 4727 * we can avoid the sorting stage below completely. 4728 * 4729 * If there are no items above the cut on the tmps 4730 * stack, then the SvTEMP must comne from an item 4731 * below the cut, so there's nothing to do. 4732 */ 4733 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) { 4734 if (sv == *tmps_basep) 4735 tmps_basep++; 4736 else 4737 SvTEMP_off(sv); 4738 } 4739 } 4740 else if (!SvPADTMP(sv)) { 4741 /* mortalise arg to avoid it being freed during save 4742 * stack unwinding. Pad tmps don't need mortalising as 4743 * they're never freed. 
This is the equivalent of 4744 * sv_2mortal(SvREFCNT_inc(sv)), except that: 4745 * * it assumes that the temps stack has already been 4746 * extended; 4747 * * it puts the new item at the cut rather than at 4748 * ++PL_tmps_ix, moving the previous occupant there 4749 * instead. 4750 */ 4751 if (!SvIMMORTAL(sv)) { 4752 SvREFCNT_inc_simple_void_NN(sv); 4753 SvTEMP_on(sv); 4754 /* Note that if there's nothing above the cut, 4755 * this copies the garbage one slot above 4756 * PL_tmps_ix onto itself. This is harmless (the 4757 * stack's already been extended), but might in 4758 * theory trigger warnings from tools like ASan 4759 */ 4760 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep; 4761 *tmps_basep++ = sv; 4762 } 4763 } 4764 } 4765 else { 4766 /* Make a mortal copy of the SV. 4767 * The following code is the equivalent of sv_mortalcopy() 4768 * except that: 4769 * * it assumes the temps stack has already been extended; 4770 * * it optimises the copying for some simple SV types; 4771 * * it puts the new item at the cut rather than at 4772 * ++PL_tmps_ix, moving the previous occupant there 4773 * instead. 4774 */ 4775 SV *newsv = newSV(0); 4776 4777 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep; 4778 /* put it on the tmps stack early so it gets freed if we die */ 4779 *tmps_basep++ = newsv; 4780 *++to_sp = newsv; 4781 4782 if (SvTYPE(sv) <= SVt_IV) { 4783 /* arg must be one of undef, IV/UV, or RV: skip 4784 * sv_setsv_flags() and do the copy directly */ 4785 U32 dstflags; 4786 U32 srcflags = SvFLAGS(sv); 4787 4788 assert(!SvGMAGICAL(sv)); 4789 if (srcflags & (SVf_IOK|SVf_ROK)) { 4790 SET_SVANY_FOR_BODYLESS_IV(newsv); 4791 4792 if (srcflags & SVf_ROK) { 4793 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv)); 4794 /* SV type plus flags */ 4795 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP); 4796 } 4797 else { 4798 /* both src and dst are <= SVt_IV, so sv_any 4799 * points to the head; so access the heads 4800 * directly rather than going via sv_any. 4801 */ 4802 assert( &(sv->sv_u.svu_iv) 4803 == &(((XPVIV*) SvANY(sv))->xiv_iv)); 4804 assert( &(newsv->sv_u.svu_iv) 4805 == &(((XPVIV*) SvANY(newsv))->xiv_iv)); 4806 newsv->sv_u.svu_iv = sv->sv_u.svu_iv; 4807 /* SV type plus flags */ 4808 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP 4809 |(srcflags & SVf_IVisUV)); 4810 } 4811 } 4812 else { 4813 assert(!(srcflags & SVf_OK)); 4814 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */ 4815 } 4816 SvFLAGS(newsv) = dstflags; 4817 4818 } 4819 else { 4820 /* do the full sv_setsv() */ 4821 SSize_t old_base; 4822 4823 SvTEMP_on(newsv); 4824 old_base = tmps_basep - PL_tmps_stack; 4825 SvGETMAGIC(sv); 4826 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV); 4827 /* the mg_get or sv_setsv might have created new temps 4828 * or realloced the tmps stack; regrow and reload */ 4829 EXTEND_MORTAL(nargs); 4830 tmps_basep = PL_tmps_stack + old_base; 4831 TAINT_NOT; /* Each item is independent */ 4832 } 4833 4834 } 4835 } while (--nargs); 4836 4837 /* If there are any temps left above the cut, we need to sort 4838 * them into those to keep and those to free. The only ones to 4839 * keep are those for which we've temporarily unset SvTEMP. 4840 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix, 4841 * swapping pairs as necessary. Stop when we meet in the middle. 
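* For example, with K marking an entry to keep (SvTEMP temporarily off) and F one to free, a frame of [ F K F K ] above the cut ends up as [ K K | F F ]: each K found at the top is swapped down to the cut (and gets SvTEMP turned back on), and the cut moves up past it.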
4842 */ 4843 { 4844 SV **top = PL_tmps_stack + PL_tmps_ix; 4845 while (tmps_basep <= top) { 4846 SV *sv = *top; 4847 if (SvTEMP(sv)) 4848 top--; 4849 else { 4850 SvTEMP_on(sv); 4851 *top = *tmps_basep; 4852 *tmps_basep = sv; 4853 tmps_basep++; 4854 } 4855 } 4856 } 4857 4858 tmps_base = tmps_basep - PL_tmps_stack; 4859 } 4860 4861 PL_stack_sp = to_sp; 4862 4863 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */ 4864 while (PL_tmps_ix >= tmps_base) { 4865 SV* const sv = PL_tmps_stack[PL_tmps_ix--]; 4866 #ifdef PERL_POISON 4867 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB); 4868 #endif 4869 if (LIKELY(sv)) { 4870 SvTEMP_off(sv); 4871 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */ 4872 } 4873 } 4874 } 4875 4876 4877 /* also tail-called by pp_return */ 4878 4879 PP(pp_leavesub) 4880 { 4881 U8 gimme; 4882 PERL_CONTEXT *cx; 4883 SV **oldsp; 4884 OP *retop; 4885 4886 cx = CX_CUR(); 4887 assert(CxTYPE(cx) == CXt_SUB); 4888 4889 if (CxMULTICALL(cx)) { 4890 /* entry zero of a stack is always PL_sv_undef, which 4891 * simplifies converting a '()' return into undef in scalar context */ 4892 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef); 4893 return 0; 4894 } 4895 4896 gimme = cx->blk_gimme; 4897 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */ 4898 4899 if (gimme == G_VOID) 4900 PL_stack_sp = oldsp; 4901 else 4902 leave_adjust_stacks(oldsp, oldsp, gimme, 0); 4903 4904 CX_LEAVE_SCOPE(cx); 4905 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */ 4906 cx_popblock(cx); 4907 retop = cx->blk_sub.retop; 4908 CX_POP(cx); 4909 4910 return retop; 4911 } 4912 4913 4914 /* clear (if possible) or abandon the current @_. If 'abandon' is true, 4915 * forces an abandon */ 4916 4917 void 4918 Perl_clear_defarray(pTHX_ AV* av, bool abandon) 4919 { 4920 const SSize_t fill = AvFILLp(av); 4921 4922 PERL_ARGS_ASSERT_CLEAR_DEFARRAY; 4923 4924 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) { 4925 av_clear(av); 4926 AvREIFY_only(av); 4927 } 4928 else { 4929 AV *newav = newAV(); 4930 av_extend(newav, fill); 4931 AvREIFY_only(newav); 4932 PAD_SVl(0) = MUTABLE_SV(newav); 4933 SvREFCNT_dec_NN(av); 4934 } 4935 } 4936 4937 4938 PP(pp_entersub) 4939 { 4940 dSP; dPOPss; 4941 GV *gv; 4942 CV *cv; 4943 PERL_CONTEXT *cx; 4944 I32 old_savestack_ix; 4945 4946 if (UNLIKELY(!sv)) 4947 goto do_die; 4948 4949 /* Locate the CV to call: 4950 * - most common case: RV->CV: f(), $ref->(): 4951 * note that if a sub is compiled before its caller is compiled, 4952 * the stash entry will be a ref to a CV, rather than being a GV. 4953 * - second most common case: CV: $ref->method() 4954 */ 4955 4956 /* a non-magic-RV -> CV ? */ 4957 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) { 4958 cv = MUTABLE_CV(SvRV(sv)); 4959 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */ 4960 goto do_ref; 4961 } 4962 else 4963 cv = MUTABLE_CV(sv); 4964 4965 /* a CV ? 
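If not, it may be a glob (or glob-ish PVLV), a bare sub name (fatal under 'strict refs'), an overloaded object, or something that simply isn't callable; the switch below sorts those cases out.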
*/ 4966 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) { 4967 /* handle all the weird cases */ 4968 switch (SvTYPE(sv)) { 4969 case SVt_PVLV: 4970 if (!isGV_with_GP(sv)) 4971 goto do_default; 4972 /* FALLTHROUGH */ 4973 case SVt_PVGV: 4974 cv = GvCVu((const GV *)sv); 4975 if (UNLIKELY(!cv)) { 4976 HV *stash; 4977 cv = sv_2cv(sv, &stash, &gv, 0); 4978 if (!cv) { 4979 old_savestack_ix = PL_savestack_ix; 4980 goto try_autoload; 4981 } 4982 } 4983 break; 4984 4985 default: 4986 do_default: 4987 SvGETMAGIC(sv); 4988 if (SvROK(sv)) { 4989 do_ref: 4990 if (UNLIKELY(SvAMAGIC(sv))) { 4991 sv = amagic_deref_call(sv, to_cv_amg); 4992 /* Don't SPAGAIN here. */ 4993 } 4994 } 4995 else { 4996 const char *sym; 4997 STRLEN len; 4998 if (UNLIKELY(!SvOK(sv))) 4999 DIE(aTHX_ PL_no_usym, "a subroutine"); 5000 5001 sym = SvPV_nomg_const(sv, len); 5002 if (PL_op->op_private & HINT_STRICT_REFS) 5003 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : ""); 5004 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv)); 5005 break; 5006 } 5007 cv = MUTABLE_CV(SvRV(sv)); 5008 if (LIKELY(SvTYPE(cv) == SVt_PVCV)) 5009 break; 5010 /* FALLTHROUGH */ 5011 case SVt_PVHV: 5012 case SVt_PVAV: 5013 do_die: 5014 DIE(aTHX_ "Not a CODE reference"); 5015 } 5016 } 5017 5018 /* At this point we want to save PL_savestack_ix, either by doing a 5019 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final 5020 * CV we will be using (so we don't know whether its XS, so we can't 5021 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on 5022 * the save stack. So remember where we are currently on the save 5023 * stack, and later update the CX or scopestack entry accordingly. */ 5024 old_savestack_ix = PL_savestack_ix; 5025 5026 /* these two fields are in a union. If they ever become separate, 5027 * we have to test for both of them being null below */ 5028 assert(cv); 5029 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv)); 5030 while (UNLIKELY(!CvROOT(cv))) { 5031 GV* autogv; 5032 SV* sub_name; 5033 5034 /* anonymous or undef'd function leaves us no recourse */ 5035 if (CvLEXICAL(cv) && CvHASGV(cv)) 5036 DIE(aTHX_ "Undefined subroutine &%" SVf " called", 5037 SVfARG(cv_name(cv, NULL, 0))); 5038 if (CvANON(cv) || !CvHASGV(cv)) { 5039 DIE(aTHX_ "Undefined subroutine called"); 5040 } 5041 5042 /* autoloaded stub? */ 5043 if (cv != GvCV(gv = CvGV(cv))) { 5044 cv = GvCV(gv); 5045 } 5046 /* should call AUTOLOAD now? */ 5047 else { 5048 try_autoload: 5049 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv), 5050 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0) 5051 |(PL_op->op_flags & OPf_REF 5052 ? GV_AUTOLOAD_ISMETHOD 5053 : 0)); 5054 cv = autogv ? GvCV(autogv) : NULL; 5055 } 5056 if (!cv) { 5057 sub_name = sv_newmortal(); 5058 gv_efullname3(sub_name, gv, NULL); 5059 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name)); 5060 } 5061 } 5062 5063 /* unrolled "CvCLONE(cv) && ! 
CvCLONED(cv)" */ 5064 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE)) 5065 DIE(aTHX_ "Closure prototype called"); 5066 5067 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub) 5068 && !CvNODEBUG(cv))) 5069 { 5070 Perl_get_db_sub(aTHX_ &sv, cv); 5071 if (CvISXSUB(cv)) 5072 PL_curcopdb = PL_curcop; 5073 if (CvLVALUE(cv)) { 5074 /* check for lsub that handles lvalue subroutines */ 5075 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV)); 5076 /* if lsub not found then fall back to DB::sub */ 5077 if (!cv) cv = GvCV(PL_DBsub); 5078 } else { 5079 cv = GvCV(PL_DBsub); 5080 } 5081 5082 if (!cv || (!CvXSUB(cv) && !CvSTART(cv))) 5083 DIE(aTHX_ "No DB::sub routine defined"); 5084 } 5085 5086 if (!(CvISXSUB(cv))) { 5087 /* This path taken at least 75% of the time */ 5088 dMARK; 5089 PADLIST *padlist; 5090 I32 depth; 5091 bool hasargs; 5092 U8 gimme; 5093 5094 /* keep PADTMP args alive throughout the call (we need to do this 5095 * because @_ isn't refcounted). Note that we create the mortals 5096 * in the caller's tmps frame, so they won't be freed until after 5097 * we return from the sub. 5098 */ 5099 { 5100 SV **svp = MARK; 5101 while (svp < SP) { 5102 SV *sv = *++svp; 5103 if (!sv) 5104 continue; 5105 if (SvPADTMP(sv)) 5106 *svp = sv = sv_mortalcopy(sv); 5107 SvTEMP_off(sv); 5108 } 5109 } 5110 5111 gimme = GIMME_V; 5112 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix); 5113 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED); 5114 cx_pushsub(cx, cv, PL_op->op_next, hasargs); 5115 5116 padlist = CvPADLIST(cv); 5117 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) 5118 pad_push(padlist, depth); 5119 PAD_SET_CUR_NOSAVE(padlist, depth); 5120 if (LIKELY(hasargs)) { 5121 AV *const av = MUTABLE_AV(PAD_SVl(0)); 5122 SSize_t items; 5123 AV **defavp; 5124 5125 defavp = &GvAV(PL_defgv); 5126 cx->blk_sub.savearray = *defavp; 5127 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av)); 5128 5129 /* it's the responsibility of whoever leaves a sub to ensure 5130 * that a clean, empty AV is left in pad[0]. 
This is normally 5131 * done by cx_popsub() */ 5132 assert(!AvREAL(av) && AvFILLp(av) == -1); 5133 5134 items = SP - MARK; 5135 if (UNLIKELY(items - 1 > AvMAX(av))) { 5136 SV **ary = AvALLOC(av); 5137 Renew(ary, items, SV*); 5138 AvMAX(av) = items - 1; 5139 AvALLOC(av) = ary; 5140 AvARRAY(av) = ary; 5141 } 5142 5143 if (items) 5144 Copy(MARK+1,AvARRAY(av),items,SV*); 5145 AvFILLp(av) = items - 1; 5146 } 5147 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO && 5148 !CvLVALUE(cv))) 5149 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf, 5150 SVfARG(cv_name(cv, NULL, 0))); 5151 /* warning must come *after* we fully set up the context 5152 * stuff so that __WARN__ handlers can safely dounwind() 5153 * if they want to 5154 */ 5155 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN 5156 && ckWARN(WARN_RECURSION) 5157 && !(PERLDB_SUB && cv == GvCV(PL_DBsub)))) 5158 sub_crush_depth(cv); 5159 RETURNOP(CvSTART(cv)); 5160 } 5161 else { 5162 SSize_t markix = TOPMARK; 5163 bool is_scalar; 5164 5165 ENTER; 5166 /* pretend we did the ENTER earlier */ 5167 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix; 5168 5169 SAVETMPS; 5170 PUTBACK; 5171 5172 if (UNLIKELY(((PL_op->op_private 5173 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub) 5174 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO && 5175 !CvLVALUE(cv))) 5176 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf, 5177 SVfARG(cv_name(cv, NULL, 0))); 5178 5179 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) { 5180 /* Need to copy @_ to stack. Alternative may be to 5181 * switch stack to @_, and copy return values 5182 * back. This would allow popping @_ in XSUB, e.g.. XXXX */ 5183 AV * const av = GvAV(PL_defgv); 5184 const SSize_t items = AvFILL(av) + 1; 5185 5186 if (items) { 5187 SSize_t i = 0; 5188 const bool m = cBOOL(SvRMAGICAL(av)); 5189 /* Mark is at the end of the stack. */ 5190 EXTEND(SP, items); 5191 for (; i < items; ++i) 5192 { 5193 SV *sv; 5194 if (m) { 5195 SV ** const svp = av_fetch(av, i, 0); 5196 sv = svp ? *svp : NULL; 5197 } 5198 else sv = AvARRAY(av)[i]; 5199 if (sv) SP[i+1] = sv; 5200 else { 5201 SP[i+1] = av_nonelem(av, i); 5202 } 5203 } 5204 SP += items; 5205 PUTBACK ; 5206 } 5207 } 5208 else { 5209 SV **mark = PL_stack_base + markix; 5210 SSize_t items = SP - mark; 5211 while (items--) { 5212 mark++; 5213 if (*mark && SvPADTMP(*mark)) { 5214 *mark = sv_mortalcopy(*mark); 5215 } 5216 } 5217 } 5218 /* We assume first XSUB in &DB::sub is the called one. */ 5219 if (UNLIKELY(PL_curcopdb)) { 5220 SAVEVPTR(PL_curcop); 5221 PL_curcop = PL_curcopdb; 5222 PL_curcopdb = NULL; 5223 } 5224 /* Do we need to open block here? XXXX */ 5225 5226 /* calculate gimme here as PL_op might get changed and then not 5227 * restored until the LEAVE further down */ 5228 is_scalar = (GIMME_V == G_SCALAR); 5229 5230 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */ 5231 assert(CvXSUB(cv)); 5232 CvXSUB(cv)(aTHX_ cv); 5233 5234 #if defined DEBUGGING && !defined DEBUGGING_RE_ONLY 5235 /* This duplicates the check done in runops_debug(), but provides more 5236 * information in the common case of the fault being with an XSUB. 5237 * 5238 * It should also catch an XSUB pushing more than it extends 5239 * in scalar context. 
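* (si_stack_hwm is the high-water mark recorded when the stack is extended under DEBUGGING; if the SP we return with lies above it, the XSUB pushed values without extending the stack first.)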
5240 */ 5241 if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base) 5242 Perl_croak_nocontext( 5243 "panic: XSUB %s::%s (%s) failed to extend arg stack: " 5244 "base=%p, sp=%p, hwm=%p\n", 5245 HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv), 5246 PL_stack_base, PL_stack_sp, 5247 PL_stack_base + PL_curstackinfo->si_stack_hwm); 5248 #endif 5249 /* Enforce some sanity in scalar context. */ 5250 if (is_scalar) { 5251 SV **svp = PL_stack_base + markix + 1; 5252 if (svp != PL_stack_sp) { 5253 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp; 5254 PL_stack_sp = svp; 5255 } 5256 } 5257 LEAVE; 5258 return NORMAL; 5259 } 5260 } 5261 5262 void 5263 Perl_sub_crush_depth(pTHX_ CV *cv) 5264 { 5265 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH; 5266 5267 if (CvANON(cv)) 5268 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine"); 5269 else { 5270 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"", 5271 SVfARG(cv_name(cv,NULL,0))); 5272 } 5273 } 5274 5275 5276 5277 /* like croak, but report in context of caller */ 5278 5279 void 5280 Perl_croak_caller(const char *pat, ...) 5281 { 5282 dTHX; 5283 va_list args; 5284 const PERL_CONTEXT *cx = caller_cx(0, NULL); 5285 5286 /* make error appear at call site */ 5287 assert(cx); 5288 PL_curcop = cx->blk_oldcop; 5289 5290 va_start(args, pat); 5291 vcroak(pat, &args); 5292 NOT_REACHED; /* NOTREACHED */ 5293 va_end(args); 5294 } 5295 5296 5297 PP(pp_aelem) 5298 { 5299 dSP; 5300 SV** svp; 5301 SV* const elemsv = POPs; 5302 IV elem = SvIV(elemsv); 5303 AV *const av = MUTABLE_AV(POPs); 5304 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 5305 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 5306 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 5307 bool preeminent = TRUE; 5308 SV *sv; 5309 5310 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC))) 5311 Perl_warner(aTHX_ packWARN(WARN_MISC), 5312 "Use of reference \"%" SVf "\" as array index", 5313 SVfARG(elemsv)); 5314 if (UNLIKELY(SvTYPE(av) != SVt_PVAV)) 5315 RETPUSHUNDEF; 5316 5317 if (UNLIKELY(localizing)) { 5318 MAGIC *mg; 5319 HV *stash; 5320 5321 /* If we can determine whether the element exist, 5322 * Try to preserve the existenceness of a tied array 5323 * element by using EXISTS and DELETE if possible. 5324 * Fallback to FETCH and STORE otherwise. */ 5325 if (SvCANEXISTDELETE(av)) 5326 preeminent = av_exists(av, elem); 5327 } 5328 5329 svp = av_fetch(av, elem, lval && !defer); 5330 if (lval) { 5331 #ifdef PERL_MALLOC_WRAP 5332 if (SvUOK(elemsv)) { 5333 const UV uv = SvUV(elemsv); 5334 elem = uv > IV_MAX ? IV_MAX : uv; 5335 } 5336 else if (SvNOK(elemsv)) 5337 elem = (IV)SvNV(elemsv); 5338 if (elem > 0) { 5339 MEM_WRAP_CHECK_s(elem,SV*,"Out of memory during array extend"); 5340 } 5341 #endif 5342 if (!svp || !*svp) { 5343 IV len; 5344 if (!defer) 5345 DIE(aTHX_ PL_no_aelem, elem); 5346 len = av_tindex(av); 5347 /* Resolve a negative index that falls within the array. Leave 5348 it negative it if falls outside the array. */ 5349 if (elem < 0 && len + elem >= 0) 5350 elem = len + elem; 5351 if (elem >= 0 && elem <= len) 5352 /* Falls within the array. */ 5353 PUSHs(av_nonelem(av,elem)); 5354 else 5355 /* Falls outside the array. If it is negative, 5356 magic_setdefelem will use the index for error reporting. 
5357 */ 5358 mPUSHs(newSVavdefelem(av, elem, 1)); 5359 RETURN; 5360 } 5361 if (UNLIKELY(localizing)) { 5362 if (preeminent) 5363 save_aelem(av, elem, svp); 5364 else 5365 SAVEADELETE(av, elem); 5366 } 5367 else if (PL_op->op_private & OPpDEREF) { 5368 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF)); 5369 RETURN; 5370 } 5371 } 5372 sv = (svp ? *svp : &PL_sv_undef); 5373 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */ 5374 mg_get(sv); 5375 PUSHs(sv); 5376 RETURN; 5377 } 5378 5379 SV* 5380 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what) 5381 { 5382 PERL_ARGS_ASSERT_VIVIFY_REF; 5383 5384 SvGETMAGIC(sv); 5385 if (!SvOK(sv)) { 5386 if (SvREADONLY(sv)) 5387 Perl_croak_no_modify(); 5388 prepare_SV_for_RV(sv); 5389 switch (to_what) { 5390 case OPpDEREF_SV: 5391 SvRV_set(sv, newSV(0)); 5392 break; 5393 case OPpDEREF_AV: 5394 SvRV_set(sv, MUTABLE_SV(newAV())); 5395 break; 5396 case OPpDEREF_HV: 5397 SvRV_set(sv, MUTABLE_SV(newHV())); 5398 break; 5399 } 5400 SvROK_on(sv); 5401 SvSETMAGIC(sv); 5402 SvGETMAGIC(sv); 5403 } 5404 if (SvGMAGICAL(sv)) { 5405 /* copy the sv without magic to prevent magic from being 5406 executed twice */ 5407 SV* msv = sv_newmortal(); 5408 sv_setsv_nomg(msv, sv); 5409 return msv; 5410 } 5411 return sv; 5412 } 5413 5414 PERL_STATIC_INLINE HV * 5415 S_opmethod_stash(pTHX_ SV* meth) 5416 { 5417 SV* ob; 5418 HV* stash; 5419 5420 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp 5421 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a " 5422 "package or object reference", SVfARG(meth)), 5423 (SV *)NULL) 5424 : *(PL_stack_base + TOPMARK + 1); 5425 5426 PERL_ARGS_ASSERT_OPMETHOD_STASH; 5427 5428 if (UNLIKELY(!sv)) 5429 undefined: 5430 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value", 5431 SVfARG(meth)); 5432 5433 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv); 5434 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */ 5435 stash = gv_stashsv(sv, GV_CACHE_ONLY); 5436 if (stash) return stash; 5437 } 5438 5439 if (SvROK(sv)) 5440 ob = MUTABLE_SV(SvRV(sv)); 5441 else if (!SvOK(sv)) goto undefined; 5442 else if (isGV_with_GP(sv)) { 5443 if (!GvIO(sv)) 5444 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" " 5445 "without a package or object reference", 5446 SVfARG(meth)); 5447 ob = sv; 5448 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') { 5449 assert(!LvTARGLEN(ob)); 5450 ob = LvTARG(ob); 5451 assert(ob); 5452 } 5453 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob)); 5454 } 5455 else { 5456 /* this isn't a reference */ 5457 GV* iogv; 5458 STRLEN packlen; 5459 const char * const packname = SvPV_nomg_const(sv, packlen); 5460 const U32 packname_utf8 = SvUTF8(sv); 5461 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY); 5462 if (stash) return stash; 5463 5464 if (!(iogv = gv_fetchpvn_flags( 5465 packname, packlen, packname_utf8, SVt_PVIO 5466 )) || 5467 !(ob=MUTABLE_SV(GvIO(iogv)))) 5468 { 5469 /* this isn't the name of a filehandle either */ 5470 if (!packlen) 5471 { 5472 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" " 5473 "without a package or object reference", 5474 SVfARG(meth)); 5475 } 5476 /* assume it's a package name */ 5477 stash = gv_stashpvn(packname, packlen, packname_utf8); 5478 if (stash) return stash; 5479 else return MUTABLE_HV(sv); 5480 } 5481 /* it _is_ a filehandle name -- replace with a reference */ 5482 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv))); 5483 } 5484 5485 /* if we got here, ob should be an object or a glob */ 5486 if (!ob || !(SvOBJECT(ob) 5487 
|| (isGV_with_GP(ob) 5488 && (ob = MUTABLE_SV(GvIO((const GV *)ob))) 5489 && SvOBJECT(ob)))) 5490 { 5491 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference", 5492 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES) 5493 ? newSVpvs_flags("DOES", SVs_TEMP) 5494 : meth)); 5495 } 5496 5497 return SvSTASH(ob); 5498 } 5499 5500 PP(pp_method) 5501 { 5502 dSP; 5503 GV* gv; 5504 HV* stash; 5505 SV* const meth = TOPs; 5506 5507 if (SvROK(meth)) { 5508 SV* const rmeth = SvRV(meth); 5509 if (SvTYPE(rmeth) == SVt_PVCV) { 5510 SETs(rmeth); 5511 RETURN; 5512 } 5513 } 5514 5515 stash = opmethod_stash(meth); 5516 5517 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK); 5518 assert(gv); 5519 5520 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5521 RETURN; 5522 } 5523 5524 #define METHOD_CHECK_CACHE(stash,cache,meth) \ 5525 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \ 5526 if (he) { \ 5527 gv = MUTABLE_GV(HeVAL(he)); \ 5528 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \ 5529 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \ 5530 { \ 5531 XPUSHs(MUTABLE_SV(GvCV(gv))); \ 5532 RETURN; \ 5533 } \ 5534 } \ 5535 5536 PP(pp_method_named) 5537 { 5538 dSP; 5539 GV* gv; 5540 SV* const meth = cMETHOPx_meth(PL_op); 5541 HV* const stash = opmethod_stash(meth); 5542 5543 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) { 5544 METHOD_CHECK_CACHE(stash, stash, meth); 5545 } 5546 5547 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK); 5548 assert(gv); 5549 5550 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5551 RETURN; 5552 } 5553 5554 PP(pp_method_super) 5555 { 5556 dSP; 5557 GV* gv; 5558 HV* cache; 5559 SV* const meth = cMETHOPx_meth(PL_op); 5560 HV* const stash = CopSTASH(PL_curcop); 5561 /* Actually, SUPER doesn't need real object's (or class') stash at all, 5562 * as it uses CopSTASH. However, we must ensure that object(class) is 5563 * correct (this check is done by S_opmethod_stash) */ 5564 opmethod_stash(meth); 5565 5566 if ((cache = HvMROMETA(stash)->super)) { 5567 METHOD_CHECK_CACHE(stash, cache, meth); 5568 } 5569 5570 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER); 5571 assert(gv); 5572 5573 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5574 RETURN; 5575 } 5576 5577 PP(pp_method_redir) 5578 { 5579 dSP; 5580 GV* gv; 5581 SV* const meth = cMETHOPx_meth(PL_op); 5582 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0); 5583 opmethod_stash(meth); /* not used but needed for error checks */ 5584 5585 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); } 5586 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op)); 5587 5588 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK); 5589 assert(gv); 5590 5591 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5592 RETURN; 5593 } 5594 5595 PP(pp_method_redir_super) 5596 { 5597 dSP; 5598 GV* gv; 5599 HV* cache; 5600 SV* const meth = cMETHOPx_meth(PL_op); 5601 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0); 5602 opmethod_stash(meth); /* not used but needed for error checks */ 5603 5604 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op)); 5605 else if ((cache = HvMROMETA(stash)->super)) { 5606 METHOD_CHECK_CACHE(stash, cache, meth); 5607 } 5608 5609 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER); 5610 assert(gv); 5611 5612 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5613 RETURN; 5614 } 5615 5616 /* 5617 * ex: set ts=8 sts=4 sw=4 et: 5618 */ 5619