[svn] Fix crash introduced by previous patch.
commit 2cf87bea8b
parent 40fd876c57
src/ChangeLog
@@ -1,3 +1,8 @@
+2001-12-18  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+	* recur.c (retrieve_tree): Make a copy of file obtained from
+	  dl_url_file_map because the code calls xfree(file) later.
+
 2001-12-18  Hrvoje Niksic  <hniksic@arsdigita.com>
 
 	* recur.c (register_html): Maintain a hash table of HTML files
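For context, the crash works like this: hash_table_get returns the table's own pointer, and retrieve_tree later calls xfree(file), leaving dl_url_file_map holding freed memory, so the next lookup or the table's cleanup touches a dangling pointer. Duplicating the string first gives the caller something it can safely free. A minimal self-contained sketch of the pattern; table_value and table_get below are hypothetical stand-ins for the string owned by dl_url_file_map and for hash_table_get, and strdup/free stand in for wget's xstrdup/xfree:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *table_value;       /* string owned by the "hash table" */

static const char *
table_get (void)
{
  return table_value;           /* the table's own pointer, still owned by it */
}

int
main (void)
{
  table_value = strdup ("index.html");

  /* Buggy pattern (the crash):
       char *file = (char *) table_get ();
       ... free (file);
     table_value now dangles, and the table later frees it again.  */

  /* Fixed pattern, as in the hunk below: copy before taking ownership.  */
  char *file = strdup (table_get ());
  printf ("Already downloaded, reusing %s\n", file);
  free (file);                  /* safe: frees only the copy */

  free (table_value);           /* the table frees its original on cleanup */
  return 0;
}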
src/recur.c
@@ -228,16 +228,11 @@ retrieve_tree (const char *start_url)
 	 the second time.  */
       if (dl_url_file_map && hash_table_contains (dl_url_file_map, url))
 	{
-	  file = hash_table_get (dl_url_file_map, url);
+	  file = xstrdup (hash_table_get (dl_url_file_map, url));
 
 	  DEBUGP (("Already downloaded \"%s\", reusing it from \"%s\".\n",
 		   url, file));
 
-	  /* #### This check might be horribly slow when downloading
-	     sites with a huge number of HTML docs.  Use a hash table
-	     instead!  Thankfully, it gets tripped only when you use
-	     `wget -r URL1 URL2 ...', as explained above.  */
-
 	  if (string_set_contains (downloaded_html_set, file))
 	    descend = 1;
 	}
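The four-line comment deleted in this hunk asked for exactly what the previous patch added: downloaded_html_set, a hash-backed set of downloaded HTML files, so the membership check is O(1) on average instead of a list walk. A rough self-contained sketch of that string-set idiom; the open-addressing table below is illustrative only, not wget's actual hash.c API:

#include <stdio.h>
#include <string.h>

#define SET_SIZE 64             /* power of two; demo-sized */
static const char *slots[SET_SIZE];

static unsigned
hash_string (const char *s)
{
  unsigned h = 5381;
  while (*s)
    h = h * 33 + (unsigned char) *s++;
  return h;
}

static void
set_add (const char *key)
{
  unsigned i = hash_string (key) & (SET_SIZE - 1);
  while (slots[i])              /* linear probing; demo assumes the set stays small */
    i = (i + 1) & (SET_SIZE - 1);
  slots[i] = key;
}

static int
set_contains (const char *key)
{
  unsigned i = hash_string (key) & (SET_SIZE - 1);
  while (slots[i])
    {
      if (strcmp (slots[i], key) == 0)
        return 1;
      i = (i + 1) & (SET_SIZE - 1);
    }
  return 0;
}

int
main (void)
{
  set_add ("index.html");
  printf ("%d %d\n", set_contains ("index.html"), set_contains ("foo.html"));
  return 0;
}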