3 require LWP::UserAgent;
4 @ISA = qw(LWP::UserAgent);
7 require WWW::RobotRules;
9 require HTTP::Response;
14 use HTTP::Date qw(time2str);
19 # Additional attributes in addition to those found in LWP::UserAgent:
21 # $self->{'delay'} Required delay between request to the same
24 # $self->{'rules'} A WWW::RobotRules object
# --- Constructor body (sampled listing: the 'sub new {' header, option
# canonicalization, and closing brace are not visible in this excerpt). ---
# Supports positional arguments ($agent, $from, $rules); 'agent' is mandatory
# and 'from' must look like a mail address (i.e. contain '@').
33 @cnf{qw(agent from rules)} = @_;
39 Carp::croak('LWP::RobotUA agent required') unless $cnf{agent};
40 Carp::croak('LWP::RobotUA from address required')
41 unless $cnf{from} && $cnf{from} =~ m/\@/;
# RobotUA-private options are removed from %cnf before it is handed to the
# parent constructor: delay defaults to 1 (minutes), use_sleep to true.
43 my $delay = delete $cnf{delay} || 1;
44 my $use_sleep = delete $cnf{use_sleep};
45 $use_sleep = 1 unless defined($use_sleep);
46 my $rules = delete $cnf{rules};
# Build via the LWP::UserAgent constructor, then re-bless into this class so
# the overridden methods (simple_request etc.) take effect.
48 my $self = LWP::UserAgent->new(%cnf);
49 $self = bless $self, $class;
51 $self->{'delay'} = $delay; # minutes
52 $self->{'use_sleep'} = $use_sleep;
# A caller-supplied rules object is told our agent name (robots.txt rules are
# matched per agent); otherwise a fresh WWW::RobotRules database is created.
# NOTE(review): the if/else lines around this branch are elided in this
# excerpt — confirm structure against the full source.
55 $rules->agent($cnf{agent});
56 $self->{'rules'} = $rules;
59 $self->{'rules'} = WWW::RobotRules->new($cnf{agent});
# Get/set the required delay between requests to the same server, in minutes.
sub delay {
    my $self = shift;
    return $self->_elem('delay', @_);
}
# Get/set whether the UA should sleep() when a request would arrive too early.
sub use_sleep {
    my $self = shift;
    return $self->_elem('use_sleep', @_);
}
# --- agent() override fragment (sub header and return not visible here). ---
# Delegates to LWP::UserAgent::agent, then pushes the new name into the rules
# object, since robots.txt permissions are keyed on the agent name.
73 my $old = $self->SUPER::agent(@_);
75 # Changing our name means to start fresh
76 $self->{'rules'}->agent($self->{'agent'});
# --- rules() accessor fragment: get/set the WWW::RobotRules object; when a
# new one is installed (@_ non-empty), sync it with our current agent name.
84 my $old = $self->_elem('rules', @_);
85 $self->{'rules'}->agent($self->{'agent'}) if @_;
# --- no_visits() body fragment: number of documents fetched from $netloc,
# normalized to 0 when the rules database has no record for that host.
92 my($self, $netloc) = @_;
93 $self->{'rules'}->no_visits($netloc) || 0;
# Alias kept so callers written against the pre-5.03 method name still work.
96 *host_count = \&no_visits; # backwards compatibility with LWP-5.02
# --- host_wait() body fragment: seconds (from now) to wait before $netloc
# may be contacted again. 'delay' is stored in minutes, hence the *60.
101 my($self, $netloc) = @_;
102 return undef unless defined $netloc;
103 my $last = $self->{'rules'}->last_visit($netloc);
105 my $wait = int($self->{'delay'} * 60 - (time - $last));
106 $wait = 0 if $wait < 0;
# NOTE(review): the final return of $wait lies outside this excerpt —
# presumably "return $wait;" follows; confirm against the full source.
# --- simple_request() override fragment (sub header, several if/else and
# closing-brace lines, and the final return are elided in this excerpt). ---
# Enforces robots.txt permission and the per-host minimum delay before
# delegating the actual fetch to LWP::UserAgent::simple_request.
115 my($self, $request, $arg, $size) = @_;
117 LWP::Debug::trace('()');
119 # Do we try to access a new server?
120 my $allowed = $self->{'rules'}->allowed($request->url);
# $allowed < 0 (apparently) means no fresh robots.txt on record: fetch it.
123 LWP::Debug::debug("Host is not visited before, or robots.txt expired.");
# Derive the /robots.txt URL for this host from the request URL.
125 my $robot_url = $request->url->clone;
126 $robot_url->path("robots.txt");
127 $robot_url->query(undef);
128 LWP::Debug::debug("Requesting $robot_url");
130 # make access to robots.txt legal since this will be a recursive call
131 $self->{'rules'}->parse($robot_url, "");
# Fetch robots.txt through our own request() so the recursion above applies.
133 my $robot_req = new HTTP::Request 'GET', $robot_url;
134 my $robot_res = $self->request($robot_req);
135 my $fresh_until = $robot_res->fresh_until;
136 if ($robot_res->is_success) {
137 my $c = $robot_res->content;
# Only parse bodies that are text and actually look like robot rules;
# anything else (e.g. an HTML error page served with 200) is ignored.
138 if ($robot_res->content_type =~ m,^text/, && $c =~ /^\s*Disallow\s*:/mi) {
139 LWP::Debug::debug("Parsing robot rules");
140 $self->{'rules'}->parse($robot_url, $c, $fresh_until);
# Non-rule content: record an empty rule set until $fresh_until.
143 LWP::Debug::debug("Ignoring robots.txt");
144 $self->{'rules'}->parse($robot_url, "", $fresh_until);
# No robots.txt at all: everything is allowed; cache that fact too.
149 LWP::Debug::debug("No robots.txt file found")
150 $self->{'rules'}->parse($robot_url, "", $fresh_until);
153 # recalculate allowed...
154 $allowed = $self->{'rules'}->allowed($request->url);
# Denied by robots.txt: synthesize a 403 locally instead of contacting host.
159 my $res = new HTTP::Response
160 &HTTP::Status::RC_FORBIDDEN, 'Forbidden by robots.txt';
161 $res->request( $request ); # bind it to that request
# Rate limiting: how long until this host may be contacted again?
# host_port is fetched inside eval since relative/degenerate URLs may die.
165 my $netloc = eval { local $SIG{__DIE__}; $request->url->host_port; };
166 my $wait = $self->host_wait($netloc);
169 LWP::Debug::debug("Must wait $wait seconds")
170 if ($self->{'use_sleep'}) {
# use_sleep disabled: synthesize a 503 with Retry-After instead of sleeping.
174 my $res = new HTTP::Response
175 &HTTP::Status::RC_SERVICE_UNAVAILABLE, 'Please, slow down';
176 $res->header('Retry-After', time2str(time + $wait));
177 $res->request( $request ); # bind it to that request
182 # Perform the request
183 my $res = $self->SUPER::simple_request($request, $arg, $size);
# Record the visit so host_wait()/no_visits() stay accurate.
185 $self->{'rules'}->visit($netloc);
# --- as_string() body fragment: human-readable summary of the UA state for
# debugging, accumulated into @s (declaration and final join are elided).
195 push(@s, "Robot: $self->{'agent'} operated by $self->{'from'} [$self]");
196 push(@s, " Minimum delay: " . int($self->{'delay'}*60) . "s");
197 push(@s, " Will sleep if too early") if $self->{'use_sleep'};
198 push(@s, " Rules = $self->{'rules'}");
209 LWP::RobotUA - a class for well-behaved Web robots
214 my $ua = LWP::RobotUA->new('my-robot/0.1', 'me@foo.com');
215 $ua->delay(10); # be very nice -- max one hit every ten minutes!
218 # Then just use it just like a normal LWP::UserAgent:
219 my $response = $ua->get('http://whatever.int/...');
224 This class implements a user agent that is suitable for robot
225 applications. Robots should be nice to the servers they visit. They
226 should consult the F</robots.txt> file to ensure that they are welcomed
227 and they should not make requests too frequently.
229 But before you consider writing a robot, take a look at
230 L<http://www.robotstxt.org/>.
232 When you use a I<LWP::RobotUA> object as your user agent, then you do not
233 really have to think about these things yourself; C<robots.txt> files
234 are automatically consulted and obeyed, the server isn't queried
235 too rapidly, and so on. Just send requests
236 as you do when you are using a normal I<LWP::UserAgent>
237 object (using C<< $ua->get(...) >>, C<< $ua->head(...) >>,
238 C<< $ua->request(...) >>, etc.), and this
239 special agent will make sure you are nice.
243 The LWP::RobotUA is a sub-class of LWP::UserAgent and implements the
244 same methods. In addition the following methods are provided:
248 =item $ua = LWP::RobotUA->new( %options )
250 =item $ua = LWP::RobotUA->new( $agent, $from )
252 =item $ua = LWP::RobotUA->new( $agent, $from, $rules )
254 The LWP::UserAgent options C<agent> and C<from> are mandatory. The
255 options C<delay>, C<use_sleep> and C<rules> initialize attributes
256 private to the RobotUA. If C<rules> are not provided, then
257 C<WWW::RobotRules> is instantiated providing an internal database of
260 It is also possible to just pass the value of C<agent>, C<from> and
261 optionally C<rules> as plain positional arguments.
265 =item $ua->delay( $minutes )
267 Get/set the minimum delay between requests to the same server, in
268 I<minutes>. The default is 1 minute. Note that this number doesn't
269 have to be an integer; for example, this sets the delay to 10 seconds:
275 =item $ua->use_sleep( $boolean )
277 Get/set a value indicating whether the UA should sleep() if requests
278 arrive too fast, i.e. when less than $ua->delay minutes have passed since the
279 last request to the given server. The default is TRUE. If this value is
280 FALSE then an internal SERVICE_UNAVAILABLE response will be generated.
281 It will have a Retry-After header that indicates when it is OK to
282 send another request to this server.
286 =item $ua->rules( $rules )
288 Set/get which I<WWW::RobotRules> object to use.
290 =item $ua->no_visits( $netloc )
292 Returns the number of documents fetched from this server host. Yeah I
293 know, this method should probably have been named num_visits() or
294 something like that. :-(
296 =item $ua->host_wait( $netloc )
298 Returns the number of I<seconds> (from now) you must wait before you can
299 make a new request to this host.
303 Returns a string that describes the state of the UA.
304 Mainly useful for debugging.
310 L<LWP::UserAgent>, L<WWW::RobotRules>
314 Copyright 1996-2004 Gisle Aas.
316 This library is free software; you can redistribute it and/or
317 modify it under the same terms as Perl itself.