#!./perl -w

BEGIN {
    chdir 't' if -d 't';
    require './test.pl';
    set_up_inc('../lib');
}

use strict;

# This will crash perl if it fails

use constant PVBM => 'foo';

my $dummy = index 'foo', PVBM;
eval { my %h = (a => PVBM); 1 };

ok (!$@, 'fbm scalar can be inserted into a hash');


my $destroyed;
{ package Class; DESTROY { ++$destroyed; } }

$destroyed = 0;
{
    my %h;
    keys(%h) = 1;
    $h{key} = bless({}, 'Class');
}
is($destroyed, 1, 'Timely hash destruction with lvalue keys');


# [perl #79178] Hash keys must not be stringified during compilation
# Run perl -MO=Concise -e '$a{\"foo"}' on a non-threaded pre-5.13.8 version
# to see why.
{
    my $key;
    package bar;
    sub TIEHASH { bless {}, $_[0] }
    sub FETCH { $key = $_[1] }
    package main;
    tie my %h, "bar";
    () = $h{\'foo'};
    is ref $key, SCALAR =>
        'ref hash keys are not stringified during compilation';
    use constant u => undef;
    no warnings 'uninitialized'; # work around unfixed bug #105918
    () = $h{+u};
    is $key, undef,
        'undef hash keys are not stringified during compilation, either';
}

# Part of RT #85026: Deleting the current iterator in void context does not
# free it.
{
    my $gone;
    no warnings 'once';
    local *::DESTROY = sub { ++$gone };
    my %a=(a=>bless[]);
    each %a; # make the entry with the obj the current iterator
    delete $a{a};
    ok $gone, 'deleting the current iterator in void context frees the val'
}

# [perl #99660] Deleted hash element visible to destructor
{
    my %h;
    $h{k} = bless [];
    my $normal_exit;
    local *::DESTROY = sub { my $x = $h{k}; ++$normal_exit };
    delete $h{k}; # must be in void context to trigger the bug
    ok $normal_exit, 'freed hash elems are not visible to DESTROY';
}

# [perl #100340] Similar bug: freeing a hash elem during a delete
sub guard::DESTROY {
    ${$_[0]}->();
};
*guard = sub (&) {
    my $callback = shift;
    return bless \$callback, "guard"
};
{
    my $ok;
    my %t; %t = (
        stash => {
            guard => guard(sub{
                $ok++;
                delete $t{stash};
            }),
            foo => "bar",
            bar => "baz",
        },
    );
    ok eval { delete $t{stash}{guard}; # must be in void context
              1 },
        'freeing a hash elem from destructor called by delete does not die';
    diag $@ if $@; # panic: free from wrong pool
    is $ok, 1, 'the destructor was called';
}

# Weak references to pad hashes
SKIP: {
    skip_if_miniperl("No Scalar::Util::weaken under miniperl", 1);
    my $ref;
    require Scalar::Util;
    {
        my %hash;
        Scalar::Util::weaken($ref = \%hash);
        1; # the previous statement must not be the last
    }
    is $ref, undef, 'weak refs to pad hashes go stale on scope exit';
}

# [perl #107440]
sub A::DESTROY { $::ra = 0 }
$::ra = {a=>bless [], 'A'};
undef %$::ra;
pass 'no crash when freeing hash that is being undeffed';
$::ra = {a=>bless [], 'A'};
%$::ra = ('a'..'z');
pass 'no crash when freeing hash that is being exonerated, ahem, cleared';

# If I have these correct then removing any part of the lazy hash fill handling
# code in hv.c will cause some of these tests to start failing.
sub validate_hash {
    my ($desc, $h) = @_;
    local $::Level = $::Level + 1;

    # test that scalar(%hash) works as expected, which as of perl 5.25 is
    # the same as 0+keys %hash;
    my $scalar= scalar %$h;
    my $count= 0+keys %$h;

    is($scalar, $count, "$desc scalar() should be the same as 0+keys() as of perl 5.25");

    require Hash::Util;
    sub Hash::Util::bucket_ratio (\%);

    # back compat tests, via Hash::Util::bucket_ratio();
    my $ratio = Hash::Util::bucket_ratio(%$h);
    my $expect = qr!\A(\d+)/(\d+)\z!;
    like($ratio, $expect, "$desc bucket_ratio matches pattern");
    my ($used, $total)= (0,0);
    ($used, $total)= ($1,$2) if $ratio =~ /$expect/;
    cmp_ok($total, '>', 0, "$desc has >0 array size ($total)");
    cmp_ok($used, '>', 0, "$desc uses >0 heads ($used)");
    cmp_ok($used, '<=', $total,
           "$desc doesn't use more heads than are available");
    return ($used, $total);
}

sub torture_hash {
    my $desc = shift;
    # Intentionally use an anon hash rather than a lexical, as lexicals default
    # to getting reused on subsequent calls
    my $h = {};
    ++$h->{$_} foreach @_;

    my ($used0, $total0) = validate_hash($desc, $h);
    # Remove half the keys each time round, until there are only 1 or 2 left
    my @groups;
    my ($h2, $h3, $h4);
    while (keys %$h > 2) {
        my $take = (keys %$h) / 2 - 1;
        my @keys = (sort keys %$h)[0..$take];

        my $scalar = %$h;
        delete @$h{@keys};
        push @groups, $scalar, \@keys;

        my $count = keys %$h;
        my ($used, $total) = validate_hash("$desc (-$count)", $h);
        is($total, $total0, "$desc ($count) has same array size");
        cmp_ok($used, '<=', $used0, "$desc ($count) has same or fewer heads");
        ++$h2->{$_} foreach @keys;
        my (undef, $total2) = validate_hash("$desc (+$count)", $h2);
        cmp_ok($total2, '<=', $total0, "$desc ($count) array size no larger");

        # Each time this will get emptied then repopulated. If the fill isn't reset
        # when the hash is emptied, the used count will likely exceed the array
        %$h3 = %$h2;
        is(join(",", sort keys %$h3), join(",", sort keys %$h2),
           "$desc (+$count copy) has same keys");
        my (undef, $total3) = validate_hash("$desc (+$count copy)", $h3);
        # We now only split when we collide on insert AND exceed the load factor
        # when we did so. Building a hash via %x=%y means a pseudo-random key
        # order inserting into %x, and we may end up encountering a collision
        # at a different point in the load order, resulting in a possible power of
        # two difference under the current load factor expectations. If this test
        # fails then it is probably because DO_HSPLIT was changed, and this test
        # needs to be adjusted accordingly.
        ok( $total2 == $total3 || $total2*2 == $total3 || $total2 == $total3*2,
            "$desc (+$count copy) array size within a power of 2 of each other");

        # This might use fewer buckets than the original
        %$h4 = %$h;
        my (undef, $total4) = validate_hash("$desc ($count copy)", $h4);
        cmp_ok($total4, '<=', $total0, "$desc ($count copy) array size no larger");
    }

    my $scalar = %$h;
    my @keys = sort keys %$h;
    delete @$h{@keys};
    is(scalar %$h, 0, "scalar keys for empty $desc");

    # Rebuild the original hash, and build a copy
    # These will fail if hash key addition and deletion aren't handled correctly
    my $h1;
    foreach (@keys) {
        ++$h->{$_};
        ++$h1->{$_};
    }
    is(scalar %$h, $scalar, "scalar keys restored when rebuilding");

    while (@groups) {
        my $keys = pop @groups;
        ++$h->{$_} foreach @$keys;
        my (undef, $total) = validate_hash($desc, $h);
        ok($total == $total0 || $total == ($total0*2),
           "bucket count is expected size when rebuilding");
        is(scalar %$h, pop @groups, "scalar keys is identical when rebuilding");
        ++$h1->{$_} foreach @$keys;
        validate_hash("$desc copy", $h1);
    }
    # This will fail if the fill count isn't handled correctly on hash split
    is(scalar %$h1, scalar %$h, "scalar keys is identical on copy and original");
}

if (is_miniperl) {
    print "# skipping torture_hash tests on miniperl because no Hash::Util\n";
} else {
    torture_hash('a .. zz', 'a' .. 'zz');
    torture_hash('0 .. 9', 0 .. 9);
    torture_hash("'Perl'", 'Rules');
}

{
    my %h = qw(a x b y c z);
    no warnings qw(misc uninitialized);
    %h = $h{a};
    is(join(':', %h), 'x:', 'hash self-assign');
}

# magic keys and values should be evaluated before the hash on the LHS is
# cleared

package Magic {
    my %inner;
    sub TIEHASH { bless [] }
    sub FETCH { $inner{$_[1]} }
    sub STORE { $inner{$_[1]} = $_[2]; }
    sub CLEAR { %inner = () }

    my (%t1, %t2);
    tie %t1, 'Magic';
    tie %t2, 'Magic';

    %inner = qw(a x b y);
    %t1 = (@t2{'a','b'});
    ::is(join( ':', %inner), "x:y", "magic keys");
}


done_testing();