/* Support for Robot Exclusion Standard (RES).
   Copyright (C) 2001, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of Wget.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or (at
your option) any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with Wget.  If not, see <http://www.gnu.org/licenses/>.

Additional permission under GNU GPL version 3 section 7

If you modify this program, or any covered work, by linking or
combining it with the OpenSSL project's OpenSSL library (or a
modified version of that library), containing parts covered by the
terms of the OpenSSL or SSLeay licenses, the Free Software Foundation
grants you additional permission to convey the resulting work.
Corresponding Source for a non-source form of such a combination
shall include the source code for the parts of OpenSSL used as well
as that of the covered work.  */

/* This file implements the Robot Exclusion Standard (RES).

   RES is a simple protocol that enables site admins to signal to
   web crawlers that certain parts of the site should not be
   accessed.  All the admin needs to do is create a "robots.txt" file
   in the web server root, and use simple commands to allow or
   disallow access to certain parts of the site.

   The first specification was written by Martijn Koster in 1994, and
   is still available at <http://www.robotstxt.org/wc/norobots.html>.
   In 1996, Martijn wrote an Internet Draft specifying an improved RES
   specification; however, that work was apparently abandoned, as the
   draft expired in 1997 and has not been replaced since.  The draft
   is available at <http://www.robotstxt.org/wc/norobots-rfc.html>.

   This file implements RES as specified by the draft.  Note that this
   only handles the "robots.txt" support.  The META tag that controls
   whether the links should be followed is handled in `html-url.c'.

   Known deviations:

   * The end-of-line comment recognition is more in the spirit of the
     Bourne Shell (as specified by RES-1994).  That means that
     "foo#bar" is taken literally, whereas "foo #bar" is interpreted
     as "foo".  The Draft apparently specifies that both should be
     interpreted as "foo".

   * We don't recognize a sole CR as the line ending.

   * We don't implement an expiry mechanism for /robots.txt specs.  I
     consider it unnecessary for a relatively short-lived application
     such as Wget.  Besides, it is highly questionable whether anyone
     deploys the recommended expiry scheme for robots.txt.

   Entry points are functions res_parse, res_parse_from_file,
   res_match_path, res_register_specs, res_get_specs, and
   res_retrieve_file.  */
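
/* A minimal sketch of how a caller might tie these entry points
   together.  It is illustrative only: the host name and paths are
   hypothetical, skip_the_url () is a made-up placeholder, and the
   real call sites live elsewhere in Wget's retrieval code.

     struct robot_specs *specs = res_parse_from_file ("robots.txt");
     res_register_specs ("www.example.com", 80, specs);
     ...
     specs = res_get_specs ("www.example.com", 80);
     if (!res_match_path (specs, "private/index.html"))
       skip_the_url ();

   Note that res_match_path expects the path without the leading
   slash; add_path below strips the leading slash from the robots.txt
   rules to match.  */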

#include "wget.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <assert.h>

#include "utils.h"
#include "hash.h"
#include "url.h"
#include "retr.h"
#include "res.h"

#ifdef TESTING
#include "test.h"
#endif

struct path_info {
  char *path;
  bool allowedp;
  bool user_agent_exact_p;
};

struct robot_specs {
  int count;
  int size;
  struct path_info *paths;
};

/* Parsing the robot spec. */

/* Check whether AGENT (a string of length LENGTH) equals "wget" or
   "*".  If it is either of them, *matches is set to true.  If it is
   "wget", *exact_match is set to true as well.  */

static void
match_user_agent (const char *agent, int length,
                  bool *matches, bool *exact_match)
{
  if (length == 1 && *agent == '*')
    {
      *matches = true;
      *exact_match = false;
    }
  else if (BOUNDED_EQUAL_NO_CASE (agent, agent + length, "wget"))
    {
      *matches = true;
      *exact_match = true;
    }
  else
    {
      *matches = false;
      *exact_match = false;
    }
}

/* Add a path specification between PATH_B and PATH_E as one of the
   paths in SPECS.  */

static void
add_path (struct robot_specs *specs, const char *path_b, const char *path_e,
          bool allowedp, bool exactp)
{
  struct path_info pp;
  if (path_b < path_e && *path_b == '/')
    /* Our path representation doesn't use a leading slash, so remove
       one from theirs. */
    ++path_b;
  pp.path = strdupdelim (path_b, path_e);
  pp.allowedp = allowedp;
  pp.user_agent_exact_p = exactp;
  ++specs->count;
  if (specs->count > specs->size)
    {
      if (specs->size == 0)
        specs->size = 1;
      else
        specs->size <<= 1;
      specs->paths = xrealloc (specs->paths,
                               specs->size * sizeof (struct path_info));
    }
  specs->paths[specs->count - 1] = pp;
}

/* Recreate SPECS->paths with only those paths that have
   user_agent_exact_p set to true.  */

static void
prune_non_exact (struct robot_specs *specs)
{
  struct path_info *newpaths;
  int i, j, cnt;
  cnt = 0;
  for (i = 0; i < specs->count; i++)
    if (specs->paths[i].user_agent_exact_p)
      ++cnt;
  newpaths = xnew_array (struct path_info, cnt);
  for (i = 0, j = 0; i < specs->count; i++)
    if (specs->paths[i].user_agent_exact_p)
      newpaths[j++] = specs->paths[i];
  assert (j == cnt);
  xfree (specs->paths);
  specs->paths = newpaths;
  specs->count = cnt;
  specs->size = cnt;
}

#define EOL(p) ((p) >= lineend)

#define SKIP_SPACE(p) do {                      \
  while (!EOL (p) && c_isspace (*p))            \
    ++p;                                        \
} while (0)

#define FIELD_IS(string_literal)        \
  BOUNDED_EQUAL_NO_CASE (field_b, field_e, string_literal)

/* Parse textual RES specs beginning with SOURCE of length LENGTH.
   Return a specs object ready to be fed to res_match_path.

   The parsing itself is trivial, but creating a correct SPECS object
   is trickier than it seems, because RES is surprisingly byzantine if
   you attempt to implement it correctly.

   A "record" is a block of one or more `User-Agent' lines followed by
   one or more `Allow' or `Disallow' lines.  A record is accepted by
   Wget if one of the `User-Agent' lines was "wget", or if the user
   agent line was "*".

   After all the lines have been read, we examine whether an exact
   ("wget") user-agent field was specified.  If so, we delete all the
   lines read under "User-Agent: *" blocks because we have our own
   Wget-specific blocks.  This enables the admin to say:

       User-Agent: *
       Disallow: /

       User-Agent: google
       User-Agent: wget
       Disallow: /cgi-bin

   This means that to Wget and to Google, /cgi-bin is disallowed,
   whereas for all other crawlers, everything is disallowed.

   res_parse is implemented so that the order of records doesn't
   matter.  In the case above, the "User-Agent: *" record could have
   come after the other one.  */

struct robot_specs *
res_parse (const char *source, int length)
{
  int line_count = 1;

  const char *p = source;
  const char *end = source + length;

  /* true if last applicable user-agent field matches Wget. */
  bool user_agent_applies = false;

  /* true if last applicable user-agent field *exactly* matches
     Wget. */
  bool user_agent_exact = false;

  /* whether we ever encountered exact user agent. */
  bool found_exact = false;

  /* count of allow/disallow lines in the current "record", i.e. after
     the last `user-agent' instructions. */
  int record_count = 0;

  struct robot_specs *specs = xnew0 (struct robot_specs);

  while (1)
    {
      const char *lineend, *lineend_real;
      const char *field_b, *field_e;
      const char *value_b, *value_e;

      if (p == end)
        break;
      lineend_real = memchr (p, '\n', end - p);
      if (lineend_real)
        ++lineend_real;
      else
        lineend_real = end;
      lineend = lineend_real;

      /* Before doing anything else, check whether the line is empty
         or comment-only. */
      SKIP_SPACE (p);
      if (EOL (p) || *p == '#')
        goto next;

      /* Make sure the end-of-line comments are respected by setting
         lineend to a location preceding the first comment.  Real line
         ending remains in lineend_real. */
      for (lineend = p; lineend < lineend_real; lineend++)
        if ((lineend == p || c_isspace (*(lineend - 1)))
            && *lineend == '#')
          break;

      /* Ignore trailing whitespace in the same way. */
      while (lineend > p && c_isspace (*(lineend - 1)))
        --lineend;

      assert (!EOL (p));

      field_b = p;
      while (!EOL (p) && (c_isalnum (*p) || *p == '-'))
        ++p;
      field_e = p;

      SKIP_SPACE (p);
      if (field_b == field_e || EOL (p) || *p != ':')
        {
          DEBUGP (("Ignoring malformed line %d", line_count));
          goto next;
        }
      ++p;                      /* skip ':' */
      SKIP_SPACE (p);

      value_b = p;
      while (!EOL (p))
        ++p;
      value_e = p;

      /* Finally, we have a syntactically valid line. */
      if (FIELD_IS ("user-agent"))
        {
          /* We have to support several cases:

             --previous records--

             User-Agent: foo
             User-Agent: Wget
             User-Agent: bar
             ... matching record ...

             User-Agent: baz
             User-Agent: qux
             ... non-matching record ...

             User-Agent: *
             ... matching record, but will be pruned later ...

             We have to respect `User-Agent' at the beginning of each
             new record simply because we don't know if we're going to
             encounter "Wget" among the agents or not.  Hence,
             match_user_agent is called when record_count != 0.

             But if record_count is 0, we have to keep calling it
             until it matches, and if that happens, we must not call
             it any more, until the next record.  Hence the other part
             of the condition. */
          if (record_count != 0 || user_agent_applies == false)
            match_user_agent (value_b, value_e - value_b,
                              &user_agent_applies, &user_agent_exact);
          if (user_agent_exact)
            found_exact = true;
          record_count = 0;
        }
      else if (FIELD_IS ("allow"))
        {
          if (user_agent_applies)
            {
              add_path (specs, value_b, value_e, true, user_agent_exact);
            }
          ++record_count;
        }
      else if (FIELD_IS ("disallow"))
        {
          if (user_agent_applies)
            {
              bool allowed = false;
              if (value_b == value_e)
                /* Empty "disallow" line means everything is *allowed*! */
                allowed = true;
              add_path (specs, value_b, value_e, allowed, user_agent_exact);
            }
          ++record_count;
        }
      else
        {
          DEBUGP (("Ignoring unknown field at line %d", line_count));
          goto next;
        }

    next:
      p = lineend_real;
      ++line_count;
    }

  if (found_exact)
    {
      /* We've encountered an exactly matching user-agent.  Throw out
         all the stuff with user-agent: *.  */
      prune_non_exact (specs);
    }
  else if (specs->size > specs->count)
    {
      /* add_path normally over-allocates specs->paths.  Reallocate it
         to the correct size in order to conserve some memory.  */
      specs->paths = xrealloc (specs->paths,
                               specs->count * sizeof (struct path_info));
      specs->size = specs->count;
    }

  return specs;
}

/* The same as res_parse, but first map the FILENAME into memory,
   and then parse it.  */

struct robot_specs *
res_parse_from_file (const char *filename)
{
  struct robot_specs *specs;
  struct file_memory *fm = read_file (filename);
  if (!fm)
    {
      logprintf (LOG_NOTQUIET, _("Cannot open %s: %s"),
                 filename, strerror (errno));
      return NULL;
    }
  specs = res_parse (fm->content, fm->length);
  read_file_free (fm);
  return specs;
}

static void
free_specs (struct robot_specs *specs)
{
  int i;
  for (i = 0; i < specs->count; i++)
    xfree (specs->paths[i].path);
  xfree_null (specs->paths);
  xfree (specs);
}

/* Matching of a path according to the specs. */

/* If C is '%' and (ptr[1], ptr[2]) form a hexadecimal number, and if
   that number is not a numerical representation of '/', decode C and
   advance the pointer.  */

#define DECODE_MAYBE(c, ptr) do {                               \
  if (c == '%' && c_isxdigit (ptr[1]) && c_isxdigit (ptr[2]))   \
    {                                                           \
      char decoded = X2DIGITS_TO_NUM (ptr[1], ptr[2]);          \
      if (decoded != '/')                                       \
        {                                                       \
          c = decoded;                                          \
          ptr += 2;                                             \
        }                                                       \
    }                                                           \
} while (0)

/* The inner matching engine: return true if RECORD_PATH matches
   URL_PATH.  The rules for matching are described at
   <http://www.robotstxt.org/wc/norobots-rfc.txt>, section 3.2.2.  */

static bool
matches (const char *record_path, const char *url_path)
{
  const char *rp = record_path;
  const char *up = url_path;

  for (; ; ++rp, ++up)
    {
      char rc = *rp;
      char uc = *up;
      if (!rc)
        return true;
      if (!uc)
        return false;
      DECODE_MAYBE(rc, rp);
      DECODE_MAYBE(uc, up);
      if (rc != uc)
        return false;
    }
}
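
/* A few worked examples of the above, for illustration only (they are
   not part of the original comments; they simply trace the code):

     matches ("tmp",      "tmp/index.html")   -> true   (prefix match)
     matches ("tmp/",     "tmp")              -> false  (URL path too short)
     matches ("%7Ejoe/",  "~joe/index.html")  -> true   ("%7E" decodes to '~')
     matches ("%2Ffoo",   "/foo")             -> false  ("%2F" is never decoded,
                                                         so '%' != '/')  */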

/* Iterate through all paths in SPECS.  For the first one that
   matches, return its allow/reject status.  If none matches,
   retrieval is by default allowed.  */

bool
res_match_path (const struct robot_specs *specs, const char *path)
{
  int i;
  if (!specs)
    return true;
  for (i = 0; i < specs->count; i++)
    if (matches (specs->paths[i].path, path))
      {
        bool allowedp = specs->paths[i].allowedp;
        DEBUGP (("%s path %s because of rule %s.\n",
                 allowedp ? "Allowing" : "Rejecting",
                 path, quote (specs->paths[i].path)));
        return allowedp;
      }
  return true;
}
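
/* Because the first matching rule wins, rule order is significant.
   As an illustration (a hypothetical robots.txt traced through the
   code above):

     User-agent: *
     Disallow: /cgi-bin
     Allow: /cgi-bin/public

   parses into the paths {"cgi-bin" (reject), "cgi-bin/public"
   (allow)} in that order, so res_match_path (specs, "cgi-bin/public/x")
   hits the "cgi-bin" rule first and returns false.  */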

/* Registering the specs. */

static struct hash_table *registered_specs;

/* Stolen from cookies.c. */
#define SET_HOSTPORT(host, port, result) do {           \
  int HP_len = strlen (host);                           \
  result = alloca (HP_len + 1 + numdigit (port) + 1);   \
  memcpy (result, host, HP_len);                        \
  result[HP_len] = ':';                                 \
  number_to_string (result + HP_len + 1, port);         \
} while (0)

/* Register RES specs that belong to the server on HOST:PORT.  They
   will later be retrievable using res_get_specs.  */

void
res_register_specs (const char *host, int port, struct robot_specs *specs)
{
  struct robot_specs *old;
  char *hp, *hp_old;
  SET_HOSTPORT (host, port, hp);

  if (!registered_specs)
    registered_specs = make_nocase_string_hash_table (0);

  if (hash_table_get_pair (registered_specs, hp, &hp_old, &old))
    {
      if (old)
        free_specs (old);
      hash_table_put (registered_specs, hp_old, specs);
    }
  else
    {
      hash_table_put (registered_specs, xstrdup (hp), specs);
    }
}

/* Get the specs that belong to HOST:PORT. */

struct robot_specs *
res_get_specs (const char *host, int port)
{
  char *hp;
  SET_HOSTPORT (host, port, hp);
  if (!registered_specs)
    return NULL;
  return hash_table_get (registered_specs, hp);
}

/* Loading the robots file.  */

#define RES_SPECS_LOCATION "/robots.txt"

/* Retrieve the robots.txt from the server root of the server that
   serves URL.  The file will be named according to the currently
   active rules, and the file name will be returned in *file.

   Return true if robots were retrieved OK, false otherwise.  */

bool
res_retrieve_file (const char *url, char **file, struct iri *iri)
{
  struct iri *i = iri_new ();
  uerr_t err;
  char *robots_url = uri_merge (url, RES_SPECS_LOCATION);
  int saved_ts_val = opt.timestamping;
  int saved_sp_val = opt.spider, url_err;
  struct url *url_parsed;

  /* Copy the server URI encoding for a possible IDNA transformation; there
     is no need to encode the full URI in UTF-8 because "robots.txt" is
     plain ASCII.  */
  set_uri_encoding (i, iri->uri_encoding, false);
  i->utf8_encode = false;

  logputs (LOG_VERBOSE, _("Loading robots.txt; please ignore errors.\n"));
  *file = NULL;
  opt.timestamping = false;
  opt.spider = false;

  url_parsed = url_parse (robots_url, &url_err, iri, true);
  if (!url_parsed)
    {
      char *error = url_error (robots_url, url_err);
      logprintf (LOG_NOTQUIET, "%s: %s.\n", robots_url, error);
      xfree (error);
      err = URLERROR;
    }
  else
    {
      err = retrieve_url (url_parsed, robots_url, file, NULL, NULL, NULL,
                          false, i, false);
      url_free (url_parsed);
    }

  opt.timestamping = saved_ts_val;
  opt.spider = saved_sp_val;
  xfree (robots_url);
  iri_free (i);

  if (err != RETROK && *file != NULL)
    {
      /* If the file is not retrieved correctly, but retrieve_url
         allocated the file name, deallocate it here so that the
         caller doesn't have to worry about it.  */
      xfree (*file);
      *file = NULL;
    }
  return err == RETROK;
}

bool
is_robots_txt_url (const char *url)
{
  char *robots_url = uri_merge (url, RES_SPECS_LOCATION);
  bool ret = are_urls_equal (url, robots_url);

  xfree (robots_url);

  return ret;
}

void
res_cleanup (void)
{
  if (registered_specs)
    {
      hash_table_iterator iter;
      for (hash_table_iterate (registered_specs, &iter);
           hash_table_iter_next (&iter);
           )
        {
          xfree (iter.key);
          free_specs (iter.value);
        }
      hash_table_destroy (registered_specs);
      registered_specs = NULL;
    }
}

#ifdef TESTING

const char *
test_is_robots_txt_url()
{
  int i;
  struct {
    char *url;
    bool expected_result;
  } test_array[] = {
    { "http://www.yoyodyne.com/robots.txt", true },
    { "http://www.yoyodyne.com/somepath/", false },
    { "http://www.yoyodyne.com/somepath/robots.txt", false },
  };

  for (i = 0; i < sizeof(test_array)/sizeof(test_array[0]); ++i)
    {
      mu_assert ("test_is_robots_txt_url: wrong result",
                 is_robots_txt_url (test_array[i].url) == test_array[i].expected_result);
    }

  return NULL;
}
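
/* An illustrative test sketch (not part of the original file and not
   registered with the test driver in test.c): it parses the example
   record set discussed in the comment above res_parse and checks the
   resulting allow/reject decisions.  */

const char *
test_res_parse_sketch()
{
  static const char *txt =
    "User-Agent: *\n"
    "Disallow: /\n"
    "\n"
    "User-Agent: google\n"
    "User-Agent: wget\n"
    "Disallow: /cgi-bin\n";
  struct robot_specs *specs = res_parse (txt, strlen (txt));

  /* Paths are matched without the leading slash (add_path strips it).  */
  mu_assert ("test_res_parse_sketch: /cgi-bin must be rejected for wget",
             res_match_path (specs, "cgi-bin/query") == false);
  mu_assert ("test_res_parse_sketch: other paths must remain allowed",
             res_match_path (specs, "index.html") == true);

  free_specs (specs);
  return NULL;
}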

#endif /* TESTING */

/*
 * vim: et ts=2 sw=2
 */