1package main;
2
3use strict;
4use warnings;
5
6use Test::More 0.88;
7
8use Cwd qw{ cwd };
9
10use lib qw{ inc/mock inc };
11use Astro::App::Satpass2::Utils qw{ HASH_REF REGEXP_REF };
12use My::Module::Test::App;	# for environment clean-up.
13
14use File::HomeDir;	# Mocked
15
16sub dump_tokens;
17sub new;
18
19use Astro::App::Satpass2;
20use Astro::App::Satpass2::Utils qw{ my_dist_config };
21
# Instantiate the application under test; all subsequent tokenize()
# calls are skipped if this fails.
new;

# A single bare word tokenizes to itself, with no redirections.
tokenize( 'foo', [ [ 'foo' ], {} ] )
    or dump_tokens;

# Multiple whitespace between words separates tokens and is discarded.
tokenize( 'foo  bar', [ [ qw{ foo bar } ], {} ] )
    or dump_tokens;
29
30=begin comment
31
tokenize( "foo\nbar", [ [ qw{ foo } ], {} ] );

tokenize( undef, [ [ qw{ bar } ] ], 'tokenize remainder of source' );

tokenize( "foo\\\nbar", [ [ 'foobar' ], {} ] );
37
38=end comment
39
40=cut
41
# Single quotes concatenate with adjacent text; the quotes themselves
# are removed.
tokenize( q{foo'bar'}, [ [ qw{ foobar } ], {} ] )
    or dump_tokens;

# A newline inside single quotes is preserved literally in the token.
tokenize( qq{foo'bar\nbaz'}, [ [ "foobar\nbaz" ], {} ] )
    or dump_tokens;
47
48=begin comment
49
50# $'...' not understood by built-in tokenizer.
51
52tokenize( q{foo$'bar'}, [ [ 'foobar' ], {} ] )
53    or dump_tokens;
54
55tokenize( qq{foo\$'bar\nbaz'}, [ [ "foobar\nbaz" ], {} ] )
56    or dump_tokens;
57
58=end comment
59
60=cut
61
# Double quotes concatenate with adjacent text, quotes removed.
tokenize( q{foo"bar"}, [ [ 'foobar' ], {} ] )
    or dump_tokens;

# A newline inside double quotes is preserved in the token.
tokenize( qq{foo"bar\nbaz"}, [ [ "foobar\nbaz" ], {} ] )
    or dump_tokens;

# A quote left open on the first line is continued on the next line;
# tokenize() feeds the extra lines via the {in} callback.
tokenize( <<'EOD', [ [ "foobar\nbaz" ], {} ] )
foo"bar
baz"
EOD
    or dump_tokens;

# A shell-style here-document in the source becomes a single token
# containing the document body.
tokenize( <<'EOD', [ [ "foo bar\nbaz\n" ], {} ] )
<<END_OF_DATA
foo bar
baz
END_OF_DATA
EOD
    or dump_tokens;

# Backslash escapes (here \n) are interpreted inside double quotes.
tokenize( q{foo"bar\\nbaz"}, [ [ "foobar\nbaz" ], {} ] )
    or dump_tokens;

# '#' inside a word is not a comment introducer ...
tokenize( q{foo#bar}, [ [ 'foo#bar' ], {} ] )
    or dump_tokens;

# ... but a '#' starting a new token discards the rest of the line ...
tokenize( q{foo # bar}, [ [ 'foo' ], {} ] )
    or dump_tokens;

# ... and a line that is all comment yields no tokens at all.
tokenize( q{# foo bar}, [ [], {} ] )
    or dump_tokens;

# Braces are ordinary characters: matched, unmatched open, and
# unmatched close all pass through literally.
tokenize( q<foo{bar}>, [ [ 'foo{bar}' ], {} ] )
    or dump_tokens;

tokenize( q<foo{bar>, [ [ 'foo{bar' ], {} ] )
    or dump_tokens;

tokenize( q<foobar}>, [ [ 'foobar}' ], {} ] )
    or dump_tokens;
102
103=begin comment
104
105# brace expansion not supported.
106
tokenize( q<foo{bar,baz}>, [ [ qw{ foobar foobaz } ], {} ] )
    or dump_tokens;

tokenize( q<foo{bar,{baz,burfle}}>,
    [ [ qw{ foobar foobaz fooburfle } ], {} ] )
    or dump_tokens;

tokenize( q<foo{bar,x{baz,burfle}}>,
    [ [ qw{ foobar fooxbaz fooxburfle } ], {} ] )
    or dump_tokens;
117
118=end comment
119
120=cut
121
# '~+' not at the start of a token is literal ...
tokenize( q{x~+}, [ [ 'x~+' ], {} ] )
    or dump_tokens;

# ... but at the start it expands to the current working directory,
# alone or with a trailing path.
tokenize( q{~+}, [ [ cwd() ], {} ] )
    or dump_tokens;

tokenize( q{~+/foo}, [ [ cwd() . '/foo' ], {} ] )
    or dump_tokens;

# A tilde not at the start of a token is also literal.
tokenize( q{x~}, [ [ 'x~' ], {} ] )
    or dump_tokens;
133
# A leading bare tilde expands to the current user's home directory.
# File::HomeDir is the mocked version from inc/mock, driven by the
# localized MOCK_* package variable.
{


    my $home = '/home/menuhin';
    local $File::HomeDir::MOCK_FILE_HOMEDIR_MY_HOME = $home;

    tokenize( q{~}, [ [ $home ], {} ] )
	or dump_tokens;

    tokenize( q{~/foo}, [ [ "$home/foo" ], {} ] )
	or dump_tokens;

}
147
# '~user' expands to that user's home directory, looked up via the
# mocked File::HomeDir users_home(); an unknown user is an error.
{
    my $home = {
	menuhin	=> '/home/menuhin',
    };
    local $File::HomeDir::MOCK_FILE_HOMEDIR_USERS_HOME = $home;

    tokenize( q{~menuhin}, [ [ $home->{menuhin} ], {} ] )
	or dump_tokens;

    tokenize( q{~menuhin/foo}, [ [ "$home->{menuhin}/foo" ], {} ] )
	or dump_tokens;

    # { fail => 1 } makes tokenize() expect the tokenizer to die with
    # a message matching the given regexp.
    tokenize( { fail => 1 }, q{~pearlman},
	qr{ \A Unable \s to \s find \s home \s for \s pearlman }smx,
	'Tokenize ~pearlman should fail' );

    tokenize( { fail => 1 }, q{~pearlman/foo},
	qr{ \A Unable \s to \s find \s home \s for \s pearlman }smx,
	'Tokenize ~pearlman/foo should fail' );

}
169
# '~~' expands to the distribution's configuration directory, obtained
# from the mocked File::HomeDir my_dist_config().
{

    my $cfg = '/home/menuhin/.local/perl/Astro-App-Satpass2';
    local $File::HomeDir::MOCK_FILE_HOMEDIR_MY_DIST_CONFIG = $cfg;

    tokenize( q{~~}, [ [ $cfg ], {} ] )
	or dump_tokens;

    tokenize( q{~~/foo}, [ [ "$cfg/foo" ], {} ] )
	or dump_tokens;

}

# With no configuration directory available, '~~' expansion must fail.
{

    local $File::HomeDir::MOCK_FILE_HOMEDIR_MY_DIST_CONFIG = undef;

    tokenize( { fail => 1 }, q{~~},
	qr{ \A Unable \s to \s find \s ~~ }smx,
	'Tokenize ~~ without dist dir should fail' );

    tokenize( { fail => 1 }, q{~~/foo},
	qr{ \A Unable \s to \s find \s ~~ }smx,
	'Tokenize ~~/foo without dist dir should fail' );
}
195
# Set up a known environment for the variable-interpolation tests:
# $foo and $bar defined, $fooz and $yehudi guaranteed absent (the
# local slice restores any pre-existing values when the file ends).
local $ENV{foo} = 'bar';
local $ENV{bar} = 'baz';
local @ENV{ qw{ fooz yehudi } };
delete $ENV{fooz};
delete $ENV{yehudi};

# Unquoted and double-quoted variables interpolate ...
tokenize( q{$foo}, [ [ 'bar' ], {} ] )
    or dump_tokens;

tokenize( q{"$foo"}, [ [ 'bar' ], {} ] )
    or dump_tokens;

# ... but single quotes suppress interpolation.
tokenize( q{'$foo'}, [ [ '$foo' ], {} ] )
    or dump_tokens;

# A bare here-document interpolates variables in its body ...
tokenize( <<'EOD', [ [ "bar\n" ], {} ] )
<<END_OF_DOCUMENT
$foo
END_OF_DOCUMENT
EOD
    or dump_tokens;

# ... as does one with a double-quoted terminator ...
tokenize( <<'EOD', [ [ "bar\n" ], {} ] )
<<"END_OF_DOCUMENT"
$foo
END_OF_DOCUMENT
EOD
    or dump_tokens;

# ... while a single-quoted terminator leaves the body literal.
tokenize( <<'EOD', [ [ "\$foo\n" ], {} ] )
<<'END_OF_DOCUMENT'
$foo
END_OF_DOCUMENT
EOD
    or dump_tokens;
231
232=begin comment
233
234# $'...' not supported
235
236tokenize( q{$'$foo'}, [ [ '$foo' ], {} ] )
237    or dump_tokens;
238
239=end comment
240
241=cut
242
# Braces delimit the variable name: ${foo} ('bar') abuts the literal
# 'bar' to give 'barbar'.
tokenize( q<${foo}bar>, [ [ 'barbar' ], {} ] )
    or dump_tokens;

=begin comment

# ${#..} not supported except on $@ and $*

tokenize( q<${#foo}>, [ [ '3' ], {} ] )
    or dump_tokens;

=end comment

=cut

# ${!foo} is indirect expansion: $foo is 'bar', and $bar is 'baz'.
tokenize( q<${!foo}>, [ [ 'baz' ], {} ] )
    or dump_tokens;

# An undefined variable expands to nothing - not even an empty token.
tokenize( q<$burfle>, [ [], {} ] )
    or dump_tokens;

# Establish positional parameters $1..$3 for the tests that follow.
set_positional( qw{ one two three } );
264
265=begin comment
266
267# Arrays not supported
268
269tokenize( q<${plural[0]}>, [ [ 'zero' ], {} ] )
270    or dump_tokens;
271
272tokenize( q<${plural[1]}>, [ [ 'one' ], {} ] )
273    or dump_tokens;
274
275tokenize( q<${plural[2]}>, [ [ 'two' ], {} ] )
276    or dump_tokens;
277
278tokenize( q<${#plural}>, [ [ '4' ], {} ] )
279    or dump_tokens;
280
281tokenize( q<${#@}>, [ [ '3' ], {} ] )
282    or dump_tokens;
283
284tokenize( q<${#plural[*]}>, [ )
285    { type => 'word', content => '3' } ]
286    or dump_tokens;
287
288tokenize( q<${#plural[0]}>, [ )
289    { type => 'word', content => '4' } ]
290    or dump_tokens;
291
292tokenize( q<${#plural[1]}>, [ )
293    { type => 'word', content => '3' } ]
294    or dump_tokens;
295
296tokenize( q<${#plural[2]}>, [ )
297    { type => 'word', content => '3' } ]
298    or dump_tokens;
299
300tokenize( q<${#plural[3]}>, [ )
301    { type => 'word', content => '0' } ]
302    or dump_tokens;
303
304=end comment
305
306=cut
307
# $# is the number of positional parameters (three were set above).
tokenize( q<$#>, [ [ '3' ], {} ] )
    or dump_tokens;

# Unquoted $* and $@ each expand to one token per parameter.
tokenize( q<$*>, [ [ qw{ one two three } ], {} ] )
    or dump_tokens;

tokenize( q<$@>, [ [ qw{ one two three } ], {} ] )
    or dump_tokens;

# Single quotes suppress expansion of both.
tokenize( q<'$*'>, [ [ '$*' ], {} ] )
    or dump_tokens;

tokenize( q<'$@'>, [ [ '$@' ], {} ] )
    or dump_tokens;

# Inside double quotes, "$*" joins the parameters into one token,
# while "$@" keeps them as separate tokens (shell semantics).
tokenize( q<"$*">, [ [ 'one two three' ], {} ] )
    or dump_tokens;

tokenize( q<"$@">, [ [ qw{ one two three } ], {} ] )
    or dump_tokens;

# Text adjacent to "$@" attaches to the first and last parameters.
tokenize( q<"xx$@yy">, [ [ qw{ xxone two threeyy } ], {} ] )
    or dump_tokens;

# Re-run the adjacency tests with a parameter containing a space, to
# show where word-splitting does and does not occur.
set_positional( 'o ne', 'two' );

tokenize( q<xx$@yy>, [ [ qw{ xxo ne twoyy } ], {} ] )
    or dump_tokens;

tokenize( q<"xx$@yy">, [ [ 'xxo ne', 'twoyy' ], {} ] )
    or dump_tokens;

tokenize( q<xx$*yy>, [ [ qw{ xxo ne twoyy } ], {} ] )
    or dump_tokens;

tokenize( q<"xx$*yy">, [ [ 'xxo ne twoyy' ], {} ] )
    or dump_tokens;

# ${var:-word}: default only when the variable is unset.
tokenize( q<${foo:-flurfle}>, [ [ 'bar' ], {} ] )
    or dump_tokens;

tokenize( q<${fooz:-flurfle}>, [ [ 'flurfle' ], {} ] )
    or dump_tokens;

# ${var:=word}: assigns the default, so a later plain $fooz sees it.
tokenize( q<${fooz}>, [ [], {} ] )
    or dump_tokens;

tokenize( q<${fooz:=flurfle}>, [ [ 'flurfle' ], {} ] )
    or dump_tokens;

tokenize( q<$fooz>, [ [ 'flurfle' ], {} ] )
    or dump_tokens;

# ${var:?message}: dies with the message only when the variable is
# unset ($yehudi was deleted above).
tokenize( q<${foo:?not foolish}>, [ [ 'bar' ], {} ] )
    or dump_tokens;

tokenize_fail( q<${yehudi:?not foolish}>, qr{\Qnot foolish}smx );

# ${var:+word}: the alternate value only when the variable is set.
tokenize( q<${foo:+foolish}>, [ [ 'foolish' ], {} ] )
    or dump_tokens;

tokenize( q<${yehudi:+foolish}>, [ [], {} ] )
    or dump_tokens;

# ${var:offset} and ${var:offset:length} substrings of $foo ('bar');
# a negative offset (note the required space) counts from the end.
tokenize( q<${foo:1}>, [ [ 'ar' ], {} ] )
    or dump_tokens;

tokenize( q<${foo:1:1}>, [ [ 'a' ], {} ] )
    or dump_tokens;

tokenize( q<${foo: -1}>, [ [ 'r' ], {} ] )
    or dump_tokens;
381=begin comment
382
383# Arrays not supported except $@
384
385tokenize( q<${plural[*]:1}>, [ )
386    { type => 'word', content => 'one' },
387    { type => 'white_space', content => ' ' },
388    { type => 'word', content => 'two' } ]
389    or dump_tokens;
390
391tokenize( q<${plural[*]:1:1}>, [ )
392    { type => 'word', content => 'one' } ]
393    or dump_tokens;
394
395tokenize( q<${plural[*]: -1}>, [ )
396    { type => 'word', content => 'two' } ]
397    or dump_tokens;
398
399=end comment
400
401=cut
402
# ${@:offset:length} slices the positional parameters; exercise it
# with successively longer parameter lists so the slice is empty,
# short, exact, and truncating.
set_positional( qw{ fee } );

tokenize( '${@:1:2}', [ [], {} ] )
    or dump_tokens;

set_positional( qw{ fee fie } );

tokenize( '${@:1:2}', [ [ 'fie' ], {} ] )
    or dump_tokens;

set_positional( qw{ fee fie foe } );

tokenize( '${@:1:2}', [ [ qw{ fie foe } ], {} ] )
    or dump_tokens;

set_positional( qw{ fee fie foe fum } );

tokenize( '${@:1:2}', [ [ qw{ fie foe } ], {} ] )
    or dump_tokens;

# Special variables: $0 is the script name, $_ the Perl executable,
# $$ the process ID - compared against Perl's own values.
tokenize( '$0', [ [ $0 ], {} ] )
    or dump_tokens;

tokenize( '$_', [ [ $^X ], {} ] )
    or dump_tokens;

tokenize( '$$', [ [ $$ ], {} ] )
    or dump_tokens;

# Case-modification escapes inside double quotes: \u upcases the next
# character, \L lowercases and \U upcases up to \E.
tokenize( '"\u\LFEE FIE FOE\E FOO"', [ [ 'Fee fie foe FOO' ], {} ] )
    or dump_tokens;

# Case modification applies to interpolated values too: $2 is 'fie',
# $foo is 'bar'.
tokenize( '"Fee \U$2\E foe \u$foo"', [ [ 'Fee FIE foe Bar' ], {} ] )
    or dump_tokens;

done_testing;
439
440{
441
    my @got;		# Tokens returned by the most recent __tokenize() call.
    my @positional;	# Positional parameters passed to __tokenize().
    my $tt;		# Astro::App::Satpass2 object under test; set by new().
445
446    sub _format_method_args {
447	my @args = @_;
448	my @rslt;
449	my $name = shift( @args ) . '(';
450	while ( @args ) {
451	    my ( $name, $value ) = splice @args, 0, 2;
452	    if ( defined $value ) {
453		$value =~ m/ \A \d+ \z /smx
454		    or $value = "'$value'";
455	    } else {
456		$value = 'undef';
457	    }
458	    push @rslt, "$name => $value";
459	}
460	return $name . join( ', ', @rslt ) . ')';
461    }
462
463    sub dump_tokens {
464	diag( explain( \@got ) );
465	return;
466    }
467
468    sub new {	## no critic (RequireArgUnpacking)
469	my @args = @_;
470	@got = ();
471	my $name = _format_method_args( new => @args );
472	if ( $tt = eval {
473		Astro::App::Satpass2->new( @args );
474	    } ) {
475	    @_ = ( $name );
476	    goto &pass;
477	} else {
478	    $name.= " failed: $@";
479	    chomp $name;
480	    @_ = ( $name );
481	    goto &fail;
482	}
483    }
484
485    sub set_positional {
486	@positional = @_;
487	return;
488    }
489
    # Map of characters that must be made printable when a test name is
    # derived from a source string, plus a compiled character class
    # matching them.  Keys are the literal characters, values their
    # printable escape sequences.
    my ( %escape_char, $escape_re );
    BEGIN {
	%escape_char = (
	    '\\'	=> '\\\\',
	    "\n"	=> '\\n',
	    "\t"	=> '\\t',
	);
	# Join the escaped forms into a character class.  The regex
	# engine re-interprets the backslash escapes, so the class
	# matches literal backslash, newline, and tab; ordering inside
	# a character class is irrelevant.
	$escape_re = join '', sort values %escape_char;
	$escape_re = qr{ [$escape_re] }smx;
    }
500
    # Run one tokenization test.
    #
    # Arguments: an optional leading option hash (fail => 1 means the
    # tokenizer is expected to die, and $tokens is then a regexp the
    # error must match), the source string to tokenize, the expected
    # result (an array ref whose structure mirrors what __tokenize
    # returns - here, [ \@tokens, \%hash ]; the second element appears
    # to be ancillary data from __tokenize - see that method), and an
    # optional test name.  Returns nothing; reports via Test::More.
    sub tokenize {	## no critic (RequireArgUnpacking)
	my @args = @_;
	my $opt = HASH_REF eq ref $args[0] ? shift @args : {};
	my ( $source, $tokens, $name ) = @args;
	# A multi-line source simulates continued input: only the first
	# line is passed directly; the rest are fed back one at a time
	# through the {in} callback when the tokenizer asks for more.
	if ( $source =~ m/ \n /sxm ) {
	    my @src = split qr{ (?<= \n ) }sxm, $source;
	    $source = shift @src;
	    $opt->{in} = sub { return shift @src };
	}
	@got = ();
	# Default the test name to the source, with backslash, newline
	# and tab made printable via %escape_char.
	if ( ! defined $name ) {
	    ( $name = $source ) =~ s/ ( $escape_re ) / $escape_char{$1}
	    /smxeg;
	    $name = 'tokenize ' . $name;
	}
	SKIP: {
	    # Without an application object there is nothing to test.
	    $tt or skip( 'Failed to instantiate application', 1 );
	    if ( eval {
		    @got = $tt->__tokenize( $opt, $source, \@positional );
		    1;
		} ) {
		if ( $opt->{fail} ) {
		    @_ = ( "$name unexpectedly succeeded" );
		    goto &fail;
		} else {
		    # goto &is_deeply reports from the caller's line.
		    @_ = ( \@got, $tokens, $name );
		    goto &is_deeply;
		}
	    } else {
		my $err = $@;
		if ( $opt->{fail} ) {
		    # Expected failure: $tokens holds the error regexp.
		    if ( $err =~ m/$tokens/ ) {
			@_ = ( $name );
			goto &pass;
		    } else {
			$name .= ": $err";
			chomp $name;
			@_ = ( $name );
			goto &fail;
		    }
		} else {
		    # Unexpected death: fail with the exception text.
		    $name .= ": $err";
		    chomp $name;
		    @_ = ( $name );
		    goto &fail;
		}
	    }
	}
	return;
    }
551
    # Run one tokenization test that is expected to die.
    #
    # Arguments: an optional leading option hash (passed through to
    # __tokenize), the source string, the error the exception must
    # match (a qr// regexp, or a string compiled with qr{ }smx), and
    # an optional test name.  Passing (via like) when __tokenize dies
    # with a matching message; fails if it succeeds.
    sub tokenize_fail {	## no critic (RequireArgUnpacking)
	my @args = @_;
	my $opt = HASH_REF eq ref $args[0] ? shift @args : {};
	my ( $source, $message, $name ) = @args;
	@got = ();
	# Default the test name to the source, with backslash, newline
	# and tab made printable via %escape_char.
	if ( ! defined $name ) {
	    ( $name = $source ) =~ s/ ( $escape_re ) / $escape_char{$1}
	    /smxeg;
	    $name = 'tokenize ' . $name . ' fails';
	}
	SKIP: {
	    # Without an application object there is nothing to test.
	    $tt or skip( 'Failed to instantiate application', 1 );
	    if ( eval {
		    @got = $tt->__tokenize( $opt, $source, \@positional );
		    1;
		} ) {
		@_ = ( "$name succeeded unexpectedly" );
		goto &fail;
	    } else {
		# Promote a plain string to a regexp before matching.
		REGEXP_REF eq ref $message
		    or $message = qr{ $message }smx;
		# goto &like reports from the caller's line.
		@_ = ( $@, $message, $name );
		goto &like;
	    }
	}
	return;
    }
579
580}
581
582
5831;
584
585# ex: set textwidth=72 :
586