
[svn] Fix crash introduced by previous patch.

hniksic 2001-12-18 14:20:14 -08:00
parent 40fd876c57
commit 2cf87bea8b
2 changed files with 6 additions and 6 deletions

src/ChangeLog

@@ -1,3 +1,8 @@
+2001-12-18  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+	* recur.c (retrieve_tree): Make a copy of file obtained from
+	dl_url_file_map because the code calls xfree(file) later.
+
 2001-12-18  Hrvoje Niksic  <hniksic@arsdigita.com>
 
 	* recur.c (register_html): Maintain a hash table of HTML files

src/recur.c

@@ -228,16 +228,11 @@ retrieve_tree (const char *start_url)
 	 the second time.  */
       if (dl_url_file_map && hash_table_contains (dl_url_file_map, url))
 	{
-	  file = hash_table_get (dl_url_file_map, url);
+	  file = xstrdup (hash_table_get (dl_url_file_map, url));
 	  DEBUGP (("Already downloaded \"%s\", reusing it from \"%s\".\n",
 		   url, file));
-	  /* #### This check might be horribly slow when downloading
-	     sites with a huge number of HTML docs.  Use a hash table
-	     instead!  Thankfully, it gets tripped only when you use
-	     `wget -r URL1 URL2 ...', as explained above.  */
 	  if (string_set_contains (downloaded_html_set, file))
 	    descend = 1;
 	}
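
The crash this one-word change fixes is an ownership bug: hash_table_get hands back the pointer stored inside dl_url_file_map itself, and retrieve_tree later calls xfree(file), leaving the map holding freed memory that is eventually freed or used again. Duplicating the string with xstrdup gives retrieve_tree a copy it can free safely. Below is a minimal standalone sketch of the pattern, using a hypothetical one-entry stand-in for dl_url_file_map; only the xstrdup/xfree names mirror wget's utilities, the rest is illustrative:

    /* Illustrative stand-in for the dl_url_file_map ownership rule;
       not wget code.  The map owns its stored string, so a caller
       that will free its copy must duplicate the string first.  */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    static void *
    xmalloc (size_t n)
    {
      void *p = malloc (n);
      if (!p)
        abort ();
      return p;
    }

    static char *
    xstrdup (const char *s)
    {
      return strcpy (xmalloc (strlen (s) + 1), s);
    }

    static void
    xfree (void *p)
    {
      free (p);
    }

    /* Hypothetical one-entry map: it keeps ownership of map_file.  */
    static char *map_file;

    static char *
    map_get (void)
    {
      return map_file;
    }

    int
    main (void)
    {
      map_file = xstrdup ("index.html");

      /* Pre-patch pattern: borrow the map's pointer directly.
         Freeing this borrowed pointer later, as retrieve_tree's
         cleanup does, would leave map_file dangling, and the map's
         own teardown would free it a second time -- the crash.  */
      char *file = map_get ();
      (void) file;

      /* Post-patch pattern: copy first, so the later xfree releases
         only the caller's private allocation.  */
      file = xstrdup (map_get ());
      printf ("Already downloaded, reusing %s\n", file);
      xfree (file);      /* safe: frees only the copy */

      xfree (map_file);  /* map teardown still valid */
      return 0;
    }

The copy costs one allocation per cache hit, but it keeps retrieve_tree's rule simple: every string it holds in file is its own to free.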