There shouldn't be any size limit; here's the method in WWW::Mechanize that parses the links:
# Parse $self->{content} and populate $self->{links} with one
# WWW::Mechanize::Link object per link-bearing tag (the tags and their
# URL-carrying attributes come from the package-level %urltags map).
# Called for side effect only; deliberately returns nothing and warns
# if a caller expects a value (the old extract_links() returned one).
sub _extract_links {
    require WWW::Mechanize::Link;

    my $self = shift;

    my $p = HTML::TokeParser->new( \$self->{content} );
    $self->{links} = [];

    while ( my $token = $p->get_tag( keys %urltags ) ) {
        my $tag = $token->[0];
        # Look up the attribute that holds the URL for this tag
        # (e.g. href for <a>, src for <frame>).
        my $url = $token->[1]{ $urltags{$tag} };
        my $text;
        my $name;
        if ( $tag eq "a" ) {
            $text = $p->get_trimmed_text("/$tag");
            $text = "" unless defined $text;

            # A JavaScript window.open() in onclick overrides the href:
            # the popup target is the URL the user actually ends up at.
            my $onClick = $token->[1]{onclick};
            if ( $onClick && ( $onClick =~ /^window\.open\(\s*'([^']+)'/ ) ) {
                $url = $1;
            }
        }
        if ( $tag ne "area" ) {
            $name = $token->[1]{name};
        }
        next unless defined $url;   # probably just a name link or <AREA NOHREF...>

        push( @{ $self->{links} },
              WWW::Mechanize::Link->new( $url, $text, $name, $tag, $self->base ) );
    }

    # Old extract_links() returned a value.  Carp if someone expects
    # this version to return something.
    if ( defined wantarray ) {
        my $func = (caller(0))[3];
        $self->warn( "$func does not return a useful value" );
    }

    return;
}