Mirror of https://github.com/moparisthebest/wget (synced 2024-07-03 16:38:41 -04:00)
[svn] Gracefully handle opt.downloaded overflowing.
Published in <sxsd7gfnv17.fsf@florida.arsdigita.de>.
This commit is contained in:
parent: 28293eafd0
commit: b7a8c6d3f5
src/ChangeLog

@@ -1,3 +1,34 @@
+2000-11-01  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+	* main.c (main): In case of opt.downloaded overflowing, print
+	<overflow> instead of a totally bogus random value.
+
+	* retr.c (retrieve_from_file): Ditto.
+
+	* recur.c (recursive_retrieve): Ditto.
+
+	* main.c (main): Ditto.
+
+	* http.c (http_loop): Ditto.
+
+	* ftp.c (ftp_loop_internal): Use downloaded_increase() instead of
+	`+=', and downloaded_exceeds_quota() instead of the simple-minded
+	check.
+	(ftp_retrieve_list): Ditto.
+	(ftp_retrieve_dirs): Ditto.
+	(ftp_retrieve_glob): Ditto.
+
+	* retr.c (downloaded_increase): New function.  Notice overflows of
+	opt.downloaded.
+	(downloaded_exceeds_quota): Make sure that opt.downloaded is not
+	used if it overflowed.
+
+	* options.h (struct options): New member downloaded_overflow.
+
+2000-11-01  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+	* wget.h (enum): Remove extra space after last enumeration.
+
 2000-11-01  Hrvoje Niksic  <hniksic@arsdigita.com>
 
 	* main.c (main): Use legible_very_long() for printing
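For readers skimming the ChangeLog: the overflow detection described above relies on the fact that unsigned arithmetic wraps around modulo 2^N, so a sum that comes out smaller than the value it started from signals an overflow. Below is a minimal, self-contained sketch of that idiom; the names (add_bytes, total, total_overflow) are hypothetical stand-ins, not the wget code -- the real downloaded_increase() appears in the src/retr.c hunk further down.

#include <stdio.h>

/* Running total and overflow flag, standing in for opt.downloaded
   and opt.downloaded_overflow. */
static unsigned long total;
static int total_overflow;

static void
add_bytes (unsigned long by_how_much)
{
  unsigned long old = total;

  if (total_overflow)
    return;                     /* already saturated, stop counting */
  total += by_how_much;
  if (total < old)              /* wrapped around -> overflow */
    {
      total_overflow = 1;
      total = ~0UL;             /* pin the counter at its maximum */
    }
}

int
main (void)
{
  add_bytes (~0UL - 10);
  add_bytes (100);              /* this call overflows the counter */
  printf (total_overflow ? "<overflow>\n" : "%lu bytes\n", total);
  return 0;
}

Once the flag is set, the counter is never trusted again, which is exactly why main.c prints <overflow> instead of the wrapped value.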
src/ftp.c (10 lines changed)
@@ -1006,7 +1006,7 @@ ftp_loop_internal (struct urlinfo *u, struct fileinfo *f, ccon *con)
 	/* --dont-remove-listing was specified, so do count this towards the
 	   number of bytes and files downloaded. */
 	{
-	  opt.downloaded += len;
+	  downloaded_increase (len);
 	  opt.numurls++;
 	}
 
@@ -1021,7 +1021,7 @@ ftp_loop_internal (struct urlinfo *u, struct fileinfo *f, ccon *con)
 	 downloaded if they're going to be deleted.  People seeding proxies,
 	 for instance, may want to know how many bytes and files they've
 	 downloaded through it. */
-      opt.downloaded += len;
+      downloaded_increase (len);
       opt.numurls++;
 
       if (opt.delete_after)
@@ -1142,7 +1142,7 @@ ftp_retrieve_list (struct urlinfo *u, struct fileinfo *f, ccon *con)
 
   while (f)
     {
-      if (opt.quota && opt.downloaded > opt.quota)
+      if (downloaded_exceeds_quota ())
 	{
 	  --depth;
 	  return QUOTEXC;
@@ -1308,7 +1308,7 @@ ftp_retrieve_dirs (struct urlinfo *u, struct fileinfo *f, ccon *con)
     {
       int len;
 
-      if (opt.quota && opt.downloaded > opt.quota)
+      if (downloaded_exceeds_quota ())
 	break;
       if (f->type != FT_DIRECTORY)
 	continue;
@@ -1429,7 +1429,7 @@ ftp_retrieve_glob (struct urlinfo *u, ccon *con, int action)
 	}
     }
   freefileinfo (start);
-  if (opt.quota && opt.downloaded > opt.quota)
+  if (downloaded_exceeds_quota ())
     return QUOTEXC;
   else
     /* #### Should we return `res' here? */
src/http.c

@@ -1223,7 +1223,7 @@ The sizes do not match (local %ld) -- retrieving.\n"), local_size);
 		     tms, u->url, hstat.len, hstat.contlen, locf, count);
 	}
       ++opt.numurls;
-      opt.downloaded += hstat.len;
+      downloaded_increase (hstat.len);
 
       /* Remember that we downloaded the file for later ".orig" code. */
       if (*dt & ADDED_HTML_EXTENSION)
@@ -1249,7 +1249,7 @@ The sizes do not match (local %ld) -- retrieving.\n"), local_size);
 		     tms, u->url, hstat.len, locf, count);
 	}
       ++opt.numurls;
-      opt.downloaded += hstat.len;
+      downloaded_increase (hstat.len);
 
       /* Remember that we downloaded the file for later ".orig" code. */
       if (*dt & ADDED_HTML_EXTENSION)
@@ -1278,7 +1278,7 @@ The sizes do not match (local %ld) -- retrieving.\n"), local_size);
 		     "%s URL:%s [%ld/%ld] -> \"%s\" [%d]\n",
 		     tms, u->url, hstat.len, hstat.contlen, locf, count);
       ++opt.numurls;
-      opt.downloaded += hstat.len;
+      downloaded_increase (hstat.len);
 
       /* Remember that we downloaded the file for later ".orig" code. */
       if (*dt & ADDED_HTML_EXTENSION)
src/main.c

@@ -771,10 +771,12 @@ Can't timestamp and not clobber old files at the same time.\n"));
     {
       logprintf (LOG_NOTQUIET,
 		 _("\nFINISHED --%s--\nDownloaded: %s bytes in %d files\n"),
-		 time_str (NULL), legible_very_long (opt.downloaded),
+		 time_str (NULL),
+		 (opt.downloaded_overflow ?
+		  "<overflow>" : legible_very_long (opt.downloaded)),
 		 opt.numurls);
       /* Print quota warning, if exceeded. */
-      if (opt.quota && opt.downloaded > opt.quota)
+      if (downloaded_exceeds_quota ())
 	logprintf (LOG_NOTQUIET,
 		   _("Download quota (%s bytes) EXCEEDED!\n"),
 		   legible (opt.quota));
src/options.h

@@ -105,6 +105,7 @@ struct options
   long quota;			/* Maximum number of bytes to
 				   retrieve. */
   VERY_LONG_TYPE downloaded;	/* How much we downloaded already. */
+  int downloaded_overflow;	/* Whether the above overflowed. */
   int numurls;			/* Number of successfully downloaded
 				   URLs */
 
src/recur.c

@@ -127,7 +127,7 @@ recursive_retrieve (const char *file, const char *this_url)
   assert (this_url != NULL);
   assert (file != NULL);
   /* If quota was exceeded earlier, bail out. */
-  if (opt.quota && (opt.downloaded > opt.quota))
+  if (downloaded_exceeds_quota ())
     return QUOTEXC;
   /* Cache the current URL in the list. */
   if (first_time)
@@ -198,7 +198,7 @@ recursive_retrieve (const char *file, const char *this_url)
   for (cur_url = url_list; cur_url; cur_url = cur_url->next)
     {
       /* If quota was exceeded earlier, bail out. */
-      if (opt.quota && (opt.downloaded > opt.quota))
+      if (downloaded_exceeds_quota ())
 	break;
       /* Parse the URL for convenient use in other functions, as well
 	 as to get the optimized form.  It also checks URL integrity. */
@@ -496,7 +496,7 @@ recursive_retrieve (const char *file, const char *this_url)
   FREE_MAYBE (canon_this_url);
   /* Decrement the recursion depth. */
   --depth;
-  if (opt.quota && (opt.downloaded > opt.quota))
+  if (downloaded_exceeds_quota ())
     return QUOTEXC;
   else
     return RETROK;
src/retr.c (36 lines changed)
@@ -534,7 +534,7 @@ retrieve_from_file (const char *file, int html, int *count)
       char *filename, *new_file;
       int dt;
 
-      if (opt.quota && opt.downloaded > opt.quota)
+      if (downloaded_exceeds_quota ())
 	{
 	  status = QUOTEXC;
 	  break;
@@ -571,3 +571,37 @@ printwhat (int n1, int n2)
 {
   logputs (LOG_VERBOSE, (n1 == n2) ? _("Giving up.\n\n") : _("Retrying.\n\n"));
 }
+
+/* Increment opt.downloaded by BY_HOW_MUCH.  If an overflow occurs,
+   set opt.downloaded_overflow to 1. */
+void
+downloaded_increase (unsigned long by_how_much)
+{
+  VERY_LONG_TYPE old;
+  if (opt.downloaded_overflow)
+    return;
+  old = opt.downloaded;
+  opt.downloaded += by_how_much;
+  if (opt.downloaded < old)	/* carry flag, where are you when I
+				   need you? */
+    {
+      /* Overflow. */
+      opt.downloaded_overflow = 1;
+      opt.downloaded = ~((VERY_LONG_TYPE)0);
+    }
+}
+
+/* Return non-zero if the downloaded amount of bytes exceeds the
+   desired quota.  If quota is not set or if the amount overflowed, 0
+   is returned. */
+int
+downloaded_exceeds_quota (void)
+{
+  if (!opt.quota)
+    return 0;
+  if (opt.downloaded_overflow)
+    /* We don't really know.  (Wildly) assume not. */
+    return 0;
+
+  return opt.downloaded > opt.quota;
+}
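A small detail worth calling out in the new downloaded_increase() above: on overflow the counter is pinned to ~((VERY_LONG_TYPE)0), and for an unsigned type the bitwise complement of zero is the all-bits-one, i.e. maximum, value, so later arithmetic can never make the total look small again. A one-line check of that identity, using plain unsigned long here only because VERY_LONG_TYPE is configure-dependent:

#include <assert.h>
#include <limits.h>

int
main (void)
{
  /* ~0 applied to an unsigned long flips every value bit, so the
     result equals the type's maximum -- a convenient "pinned" ceiling. */
  assert (~0UL == ULONG_MAX);
  return 0;
}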
src/retr.h

@@ -34,4 +34,7 @@ char *rate PARAMS ((long, long));
 
 void printwhat PARAMS ((int, int));
 
+void downloaded_increase PARAMS ((unsigned long));
+int downloaded_exceeds_quota PARAMS ((void));
+
 #endif /* RETR_H */
src/wget.h

@@ -210,7 +210,7 @@ enum
   HEAD_ONLY            = 0x0004,	/* only send the HEAD request */
   SEND_NOCACHE         = 0x0008,	/* send Pragma: no-cache directive */
   ACCEPTRANGES         = 0x0010,	/* Accept-ranges header was found */
-  ADDED_HTML_EXTENSION = 0x0020,	/* added ".html" extension due to -E */
+  ADDED_HTML_EXTENSION = 0x0020 	/* added ".html" extension due to -E */
 };
 
 /* Universal error type -- used almost everywhere.
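The src/wget.h hunk above drops the comma after the last enumerator. A trailing comma in an enumerator list is only permitted from C99 onward (it was a GNU extension before that), so strict C89/C90 compilers may warn about or reject it. A minimal illustration with hypothetical names, not the actual wget enum:

/* Hypothetical flag enum illustrating the fix: no comma after the
   final enumerator, as strict C89/C90 requires. */
enum example_flags
{
  FLAG_A = 0x0001,
  FLAG_B = 0x0002,
  FLAG_C = 0x0004   /* last enumerator: no trailing comma */
};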