mirror of
https://github.com/moparisthebest/wget
synced 2024-07-03 16:38:41 -04:00
[svn] Handle starting URL of recursing download being non-parsable.
Published in <sxszo26t33k.fsf@florida.arsdigita.de>.
This commit is contained in:
parent
562ab4ae8a
commit
1fa3b90235
@@ -1,3 +1,8 @@
+2002-02-19  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+	* recur.c (retrieve_tree): Handle the case when start_url doesn't
+	parse.
+
 2002-02-19  Andreas Damm  <andreas-sourceforge@radab.org>
 
 	* wget.h (DO_REALLOC_FROM_ALLOCA): Multiply with sizeof(type) when
src/recur.c | 19 +++++++++++++-----
@@ -186,15 +186,24 @@ retrieve_tree (const char *start_url)
   uerr_t status = RETROK;
 
   /* The queue of URLs we need to load. */
-  struct url_queue *queue = url_queue_new ();
+  struct url_queue *queue;
 
   /* The URLs we do not wish to enqueue, because they are already in
      the queue, but haven't been downloaded yet.  */
-  struct hash_table *blacklist = make_string_hash_table (0);
+  struct hash_table *blacklist;
 
-  /* We'll need various components of this, so better get it over with
-     now.  */
-  struct url *start_url_parsed = url_parse (start_url, NULL);
+  int up_error_code;
+  struct url *start_url_parsed = url_parse (start_url, &up_error_code);
+
+  if (!start_url_parsed)
+    {
+      logprintf (LOG_NOTQUIET, "%s: %s.\n", start_url,
+		 url_error (up_error_code));
+      return URLERROR;
+    }
+
+  queue = url_queue_new ();
+  blacklist = make_string_hash_table (0);
 
   /* Enqueue the starting URL.  Use start_url_parsed->url rather than
      just URL so we enqueue the canonical form of the URL.  */
Loading…
Reference in New Issue
Block a user