7 use Getopt::Long qw(:config posix_default no_ignore_case gnu_compat);
8 use Parallel::ForkManager;
10 use Time::HiRes qw(sleep);
# Call usage() when the script is invoked with no command-line arguments.
# (@ARGV in boolean context is true iff it is non-empty, so this is
# equivalent to the numeric test against 0.)
12 usage() unless @ARGV;
# Command-line option spec (interior of a GetOptions(...) call whose
# opening and closing lines are elided from this excerpt).
15 'h|help' => \ my $help,          # set when -h/--help is given
16 'i|inputfile=s' => \ my $file,   # path to the URL list file (one URL per line, read by file2urls)
17 'c|concurrency=i' => \ my $concurrency,   # number of child processes to fork
18 'n|loops=i' => \ my $loops,      # how many times each child walks the full URL list
19 'w|wait=f' => \ my $wait,        # fractional seconds to wait between requests (Time::HiRes sleep)
# Build the URL list (empty when no -i/--inputfile was given) and print a
# summary of the planned workload to STDERR.
#
# BUG FIX: the original read "my @urls = file2urls($file) if ($file);".
# A "my" declaration combined with a statement modifier has *undefined
# behavior* in Perl (perlsyn: "deprecated... behaviour is undefined") --
# @urls may or may not be (re)initialized. Use an explicit ternary so the
# declaration is unconditional and the value is well-defined.
28 my @urls = $file ? file2urls($file) : ();
31 my $num = scalar @urls;   # force scalar context: element count
32 warn "$num urls with $concurrency clients, $loops loops\n";
33 warn "Total: ", $num * $concurrency * $loops, " requests\n";
34 warn "wait for $wait second between requests\n";
# Fork $concurrency children; each child replays the whole URL list
# $loops times. Parallel::ForkManager->new($n) caps the number of
# simultaneously running children at $n.
38 my $pm = Parallel::ForkManager->new($concurrency);
39 for (my $child = 0; $child < $concurrency; $child++) {
# NOTE(review): the $pm->start / $pm->finish calls that bracket the child
# body are on lines elided from this excerpt -- confirm they are present.
41 warn "forks $child/$concurrency child ...\n";
44 for (my $i = 0; $i < $loops; $i++) {
45 print STDERR "processing $i/$loops loop\r";   # \r: overwrite progress line in place
46 foreach my $url (@urls) {
# get() is presumably LWP::Simple's: returns content on success, undef on
# failure -- TODO confirm (the "use LWP::Simple" line is not visible here).
47 get($url) or warn "fail: $url\n";
# Loop/brace closers and the per-request sleep($wait), if any, are elided.
# Parent blocks here until every forked child has exited.
53 $pm->wait_all_children;
# Usage synopsis emitted by usage(); the sub's opening line (and whatever
# follows this warn -- likely an exit) is elided from this excerpt.
59 warn "$0 -i urls.txt -c concurrency -n loops -w wait_interval\n",
# Interior of file2urls(): open the input file and read it line by line.
# Three-arg open with a lexical handle; dies with the OS error on failure.
70 open my $fh, '<', $file or die "$file: $!";
# while ($var = <$fh>) gets an implicit defined() test from Perl, so lines
# containing only "0" are still read correctly.
# NOTE(review): $url is not declared with "my" here -- presumably declared
# on an elided line above; confirm, or this fails under "use strict".
73 while ($url = <$fh>) {