#!/usr/bin/perl

use HTML::LinkExtor;
use LWP::Simple;
use WWW::RobotRules;
use URI::URL;
use Iterator;                   # iterator toolkit (assumed to supply Iterator { ... }, NEXTVAL, and igrep_l used below)

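# make_robot_filter($agent): returns a closure that takes a URL and returns
# true if $agent is allowed to fetch it.  The first time each site is seen,
# its /robots.txt is fetched and parsed, and the resulting rules are cached.
# Non-HTTP URLs are always allowed.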
sub make_robot_filter {
  my $agent = shift;
  my %seen_site;
  my $rules = WWW::RobotRules->new($agent);
  return sub {
    my $url = url(shift());
    return 1 unless $url->scheme eq 'http';
    unless ($seen_site{$url->netloc}++) {
      my $robots = $url->clone;
      $robots->path('/robots.txt');
      $robots->frag(undef);
      $rules->parse($robots, get($robots));
    }
    $rules->allowed($url)
  };
}

my $ROBOT_NAME = 'Grasshopper/1.0';

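# traverse([$callback,] @start_urls): breadth-first traversal of the web,
# packaged as an iterator.  The optional callback receives ($referring_url,
# @links) and returns the links worth queueing; by default every link is
# followed.  Each NEXTVAL yields ($url, \%head, $referrer, $html) in list
# context, or just $url in scalar context.  URLs are visited at most once,
# and only when the robots.txt filter above permits.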
sub traverse {
  my $interesting_links = sub { shift; @_ };
  $interesting_links = shift if ref $_[0] eq 'CODE';
  my @queue = map [$_, 'supplied by user'], @_;
  my %seen;
  my $robot_filter = make_robot_filter($ROBOT_NAME);

  return Iterator {
    while (@queue) {
      my ($url, $referrer) = @{shift @queue};
      $url =~ s/#.*$//;                 # discard any fragment
      next if $seen{$url}++ || ! $robot_filter->($url);

      my (%head, $html);
      # HEAD request: content type, length, last-modified, expires, server
      @head{qw(TYPE LENGTH LAST_MODIFIED EXPIRES SERVER)} = head($url);
      if ($head{TYPE} eq 'text/html') {
        $html = get($url);      # assign the outer $html (returned below); don't re-declare it
        push @queue, 
          map [$_, $url],
            $interesting_links->($url, get_links($url, $html));
      }
      return wantarray ? ($url, \%head, $referrer, $html) : $url;
    }
    return;     # exhausted
  }
}

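# get_links($base, $html): use HTML::LinkExtor to collect every link
# attribute (href, src, ...) in the document, resolved to absolute URLs
# against $base.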
sub get_links {
  my ($base, $html) = @_;
  my @links;
  my $more_links = sub {
    my ($tag, %attrs) = @_;
    push @links, values %attrs;
  };

  HTML::LinkExtor->new($more_links, $base)->parse($html);
  return @links;
}

## Sample usage
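# Dead-link check for http://perl.plover.com/: links found on pages under
# $top are all examined (so bad off-site links are caught too), but pages
# outside $top are not traversed further.  A link is reported as BAD when
# its HEAD request returns no content type.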
my $top = 'http://perl.plover.com/';
my $interesting = sub { my $ref = shift;
                        $ref =~ /^\Q$top/o ? @_ : () };

my $urls = igrep_l { not $_[1]{TYPE} } traverse($interesting, $top);

while (my ($url, $head, $referrer) = NEXTVAL($urls)) {
  print "$referrer -> $url is BAD\n";
}
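
## A minimal variant (sketch, reusing the same helpers): list every HTML
## page found under $top instead of the dead links.
# my $pages = igrep_l { $_[0] =~ /^\Q$top/ and ($_[1]{TYPE} || '') eq 'text/html' }
#             traverse($interesting, $top);
# while (my ($url) = NEXTVAL($pages)) { print "$url\n" }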
