#!./perl

use strict;
use warnings;

use Config;
use Storable qw(dclone);
use Test::More;

BEGIN {
    plan skip_all => 'Storable was not built'
        if $ENV{PERL_CORE} && $Config{'extensions'} !~ /\b Storable \b/x;
    plan skip_all => 'Need 64-bit pointers for this test'
        if $Config{ptrsize} < 8 and $] > 5.013;
    plan skip_all => 'Need 64-bit int for this test on older versions'
        if $Config{uvsize} < 8 and $] < 5.013;
    plan skip_all => 'Need ~4 GiB memory for this test, set PERL_TEST_MEMORY > 4'
        if !$ENV{PERL_TEST_MEMORY} || $ENV{PERL_TEST_MEMORY} < 4;
}

# Just too big to fit in an I32.
my $huge = int(2 ** 31);
# v5.24.1c/v5.25.1c switched to die earlier with "Too many elements",
# which is much safer.
my $has_too_many = ($Config{usecperl} and
                    (($] >= 5.024001 and $] < 5.025000)
                     or $] >= 5.025001)) ? 1 : 0;

# These overlarge sizes are enabled only since Storable 3.00, and some
# cases need cperl support. Perl5 (as of 5.24) has some internal
# problems with >I32 sizes, which only cperl has fixed;
# perl5 is not yet 2GB-safe, especially with hashes.

# string len (xpv_cur):  STRLEN  (ptrsize >= 8)
# array size (xav_max):  SSize_t (I32/I64, ptrsize >= 8)
# hash size  (xhv_keys):
#     IV              - 5.12   (ivsize >= 8)
#     STRLEN   5.14   - 5.24   (size_t: U32/U64)
#     SSize_t  5.22c  - 5.24c  (I32/I64)
#     U32      5.25c  -
# hash key: I32

my @cases;
if ($Config{ptrsize} > 4) {
    @cases = (
        ['huge string',
         sub { my $s = 'x' x $huge; \$s }],

        ['array with huge element',
         sub { my $s = 'x' x $huge; [$s] }],

        ['hash with huge value',
         sub { my $s = 'x' x $huge; +{ foo => $s } }],

        # There's no huge key case: hash keys are limited to I32.
    );
}

# An array with a huge number of elements requires several gigabytes of
# virtual memory. On darwin it may get killed by the OS.
if ($Config{ptrsize} > 4 and !$has_too_many) {
    # needs 20-55G virtual memory, 4.6M heap and several minutes on a fast machine
    if ($ENV{PERL_TEST_MEMORY} >= 55) {
        push @cases,
            [ 'huge array',
              sub { my @x; $x[$huge] = undef; \@x } ];
    } else {
        diag "skip huge array, need PERL_TEST_MEMORY >= 55";
    }
}

# A hash with a huge number of keys would require tens of gigabytes of
# memory, which doesn't seem like a good idea even for this test file.
# Unfortunately even older 32-bit perls do allow this.
if (!$has_too_many) {
    # needs >90G of virtual memory, and may get killed
    if ($ENV{PERL_TEST_MEMORY} >= 96) {
        # number of keys > I32: impossible for perl5 to handle, but Storable can
        push @cases,
            ['huge hash',
             sub { my %x = (0 .. $huge); \%x } ];
    } else {
        diag "skip huge hash, need PERL_TEST_MEMORY >= 96";
    }
}

plan tests => 2 * scalar @cases;

for (@cases) {
    my ($desc, $build) = @$_;
    diag "building test input: $desc";
    my ($input, $exn, $clone);
    diag "these huge subtests need a lot of memory and time!" if $desc eq 'huge array';
    $input = $build->();
    diag "running test: $desc";
    $exn = $@ if !eval { $clone = dclone($input); 1 };

    is($exn, undef, "$desc no exception");
    is_deeply($input, $clone, "$desc cloned");
    #ok($clone, "$desc cloned");

    # Ensure the huge objects are freed right now:
    undef $input;
    undef $clone;
}