8 use Parallel::ForkManager;
10 use Time::HiRes qw(sleep);
# Command-line option spec (fragment of a GetOptions() call whose opening
# and closing lines are outside this excerpt).  Each `\ my $var` both
# declares the file-scope lexical and hands GetOptions a reference to fill.
'h|help' => \ my $help,                 # -h : show usage and exit
'i|inputfile=s' => \ my $file,          # -i : file with one URL per line (string)
'c|concurrency=i' => \ my $concurrency, # -c : number of forked clients (integer)
'n|loops=i' => \ my $loops,             # -n : passes over the URL list per client (integer)
'w|wait=f' => \ my $wait,               # -w : delay between requests, seconds (float)
# Load the URL list up front; default to an empty list when no -i file was
# given.  The original `my @urls = file2urls($file) if ($file);` used the
# `my EXPR if COND` form, whose behavior is explicitly undefined in Perl
# (see perlsyn, "Statement Modifiers") — the ternary makes the declaration
# unconditional and the initialization explicit.
my @urls = $file ? file2urls($file) : ();
# Summarize the planned workload on STDERR before any children are forked.
my $num   = scalar @urls;
my $total = $num * $concurrency * $loops;
warn "$num urls with $concurrency clients, $loops loops\n";
warn "Total: $total requests\n";
warn "wait for $wait second between requests\n";
# Fork $concurrency worker children; each child replays the full URL list
# $loops times.  NOTE(review): the lines between these statements — the
# expected $pm->start and next / $pm->finish pair, the sleep($wait)
# throttle, and the closing braces — fall outside this excerpt; verify
# against the full file.
my $pm = Parallel::ForkManager->new($concurrency);
for (my $child = 0; $child < $concurrency; $child++) {
    # Parent-side progress note as each child is spawned.
    warn "forks $child/$concurrency child ...\n";
    for (my $i = 0; $i < $loops; $i++) {
        # \r (no newline) keeps per-loop progress on one terminal line.
        print STDERR "processing $i/$loops loop\r";
        foreach my $url (@urls) {
            # get() — presumably LWP::Simple::get (its import is not visible
            # in this excerpt); returns undef on any fetch failure. TODO confirm.
            get($url) or warn "fail: $url\n";
# Parent blocks here until every forked child has exited.
$pm->wait_all_children;
# Usage banner (fragment of a help/usage routine; the trailing comma means
# further warn() arguments continue on lines outside this excerpt).
warn "$0 -i urls.txt -c concurrency -n loops -w wait_interval\n",
# Interior of file2urls (sub header and remainder are outside this excerpt):
# opens the input file and reads it line by line, presumably one URL per line.
open my $fh, '<', $file or die "$file: $!";
# NOTE(review): $url is not declared with `my` here — either it is declared
# on an unseen line, or this file does not run under `use strict`; verify.
# (while with a readline assignment gets Perl's implicit defined() check.)
while ($url = <$fh>) {