Mirror of https://github.com/moparisthebest/curl, synced 2024-12-21 23:58:49 -05:00

Commit 46b112bcd4 (parent f442dd6496): replaced tabs with spaces
@@ -503,7 +503,7 @@ main ()
  /* IoctlSocket source code */
  if(0 != IoctlSocket(0, 0, 0))
    return 1;
  ;
  return 0;
}
@@ -6,12 +6,12 @@
# CARES_FOUND, If false, do not try to use c-ares.
# also defined, but not for general use are
# CARES_LIBRARY, where to find the c-ares library.

FIND_PATH(CARES_INCLUDE_DIR ares.h
  /usr/local/include
  /usr/include
  )

SET(CARES_NAMES ${CARES_NAMES} cares)
FIND_LIBRARY(CARES_LIBRARY
  NAMES ${CARES_NAMES}
docs/INSTALL (12 changed lines)
@@ -672,8 +672,8 @@ eCos
   of running curl in this way is the contents of the configuration file
   printed to the console.

   --- src/main.c  19 Jul 2006 19:09:56 -0000  1.363
   +++ src/main.c  24 Jul 2006 21:37:23 -0000
   @@ -4286,11 +4286,31 @@
   }

@@ -808,10 +808,10 @@ CROSS COMPILE
     export NM=ppc_405-nm

     ./configure --target=powerpc-hardhat-linux \
         --host=powerpc-hardhat-linux \
         --build=i586-pc-linux-gnu \
         --prefix=/opt/hardhat/devkit/ppc/405/target/usr/local \
         --exec-prefix=/usr/local

   (end script)

@@ -46,7 +46,7 @@ SIMPLE USAGE
  Get a file from an SSH server using SCP using a private key to authenticate:

        curl -u username: --key ~/.ssh/id_dsa --pubkey ~/.ssh/id_dsa.pub \
            scp://shell.example.com/~/personal.txt

  Get the main page from an IPv6 web server:

@@ -136,7 +136,7 @@ int main(int argc, char **argv)
    /* and give the size of the upload, this supports large file sizes
       on systems that have general support for it */
    curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE,
                     (curl_off_t)file_info.st_size);

    /* tell libcurl we can use "any" auth, which lets the lib pick one, but it
       also costs one extra round-trip and possibly sending of all the PUT
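The hunk above is from an example that announces the upload size with CURLOPT_INFILESIZE_LARGE before performing the transfer. A minimal, self-contained sketch of the same idea (the file name "upload.bin" and the target URL here are placeholders for illustration, not taken from the patch):

    #include <stdio.h>
    #include <sys/stat.h>
    #include <curl/curl.h>

    int main(void)
    {
      struct stat file_info;
      FILE *src;
      CURL *curl;

      if(stat("upload.bin", &file_info) != 0)
        return 1;
      src = fopen("upload.bin", "rb");
      if(!src)
        return 1;

      curl_global_init(CURL_GLOBAL_ALL);
      curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);
        curl_easy_setopt(curl, CURLOPT_URL, "ftp://example.com/upload.bin");
        curl_easy_setopt(curl, CURLOPT_READDATA, src);
        /* announce the size so large files work on systems that support it */
        curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE,
                         (curl_off_t)file_info.st_size);
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      curl_global_cleanup();
      fclose(src);
      return 0;
    }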
@@ -34,126 +34,126 @@ int __cdecl _snscanf(const char * input, size_t length, const char * format, ...

/* parse headers for Content-Length */
size_t getcontentlengthfunc(void *ptr, size_t size, size_t nmemb, void *stream) {
  int r;
  long len = 0;

  /* _snscanf() is Win32 specific */
  r = _snscanf(ptr, size * nmemb, "Content-Length: %ld\n", &len);

  if (r) /* Microsoft: we don't read the specs */
    *((long *) stream) = len;

  return size * nmemb;
}

/* discard downloaded data */
size_t discardfunc(void *ptr, size_t size, size_t nmemb, void *stream) {
  return size * nmemb;
}

/* read data to upload */
size_t readfunc(void *ptr, size_t size, size_t nmemb, void *stream)
{
  FILE *f = stream;
  size_t n;

  if (ferror(f))
    return CURL_READFUNC_ABORT;

  n = fread(ptr, size, nmemb, f) * size;

  return n;
}


int upload(CURL *curlhandle, const char * remotepath, const char * localpath,
           long timeout, long tries)
{
  FILE *f;
  long uploaded_len = 0;
  CURLcode r = CURLE_GOT_NOTHING;
  int c;

  f = fopen(localpath, "rb");
  if (f == NULL) {
    perror(NULL);
    return 0;
  }

  curl_easy_setopt(curlhandle, CURLOPT_UPLOAD, 1L);

  curl_easy_setopt(curlhandle, CURLOPT_URL, remotepath);

  if (timeout)
    curl_easy_setopt(curlhandle, CURLOPT_FTP_RESPONSE_TIMEOUT, timeout);

  curl_easy_setopt(curlhandle, CURLOPT_HEADERFUNCTION, getcontentlengthfunc);
  curl_easy_setopt(curlhandle, CURLOPT_HEADERDATA, &uploaded_len);

  curl_easy_setopt(curlhandle, CURLOPT_WRITEFUNCTION, discardfunc);

  curl_easy_setopt(curlhandle, CURLOPT_READFUNCTION, readfunc);
  curl_easy_setopt(curlhandle, CURLOPT_READDATA, f);

  curl_easy_setopt(curlhandle, CURLOPT_FTPPORT, "-"); /* disable passive mode */
  curl_easy_setopt(curlhandle, CURLOPT_FTP_CREATE_MISSING_DIRS, 1L);

  curl_easy_setopt(curlhandle, CURLOPT_VERBOSE, 1L);

  for (c = 0; (r != CURLE_OK) && (c < tries); c++) {
    /* are we resuming? */
    if (c) { /* yes */
      /* determine the length of the file already written */

      /*
       * With NOBODY and NOHEADER, libcurl will issue a SIZE
       * command, but the only way to retrieve the result is
       * to parse the returned Content-Length header. Thus,
       * getcontentlengthfunc(). We need discardfunc() above
       * because HEADER will dump the headers to stdout
       * without it.
       */
      curl_easy_setopt(curlhandle, CURLOPT_NOBODY, 1L);
      curl_easy_setopt(curlhandle, CURLOPT_HEADER, 1L);

      r = curl_easy_perform(curlhandle);
      if (r != CURLE_OK)
        continue;

      curl_easy_setopt(curlhandle, CURLOPT_NOBODY, 0L);
      curl_easy_setopt(curlhandle, CURLOPT_HEADER, 0L);

      fseek(f, uploaded_len, SEEK_SET);

      curl_easy_setopt(curlhandle, CURLOPT_APPEND, 1L);
    }
    else { /* no */
      curl_easy_setopt(curlhandle, CURLOPT_APPEND, 0L);
    }

    r = curl_easy_perform(curlhandle);
  }

  fclose(f);

  if (r == CURLE_OK)
    return 1;
  else {
    fprintf(stderr, "%s\n", curl_easy_strerror(r));
    return 0;
  }
}

int main(int c, char **argv) {
  CURL *curlhandle = NULL;

  curl_global_init(CURL_GLOBAL_ALL);
  curlhandle = curl_easy_init();

  upload(curlhandle, "ftp://user:pass@host/path/file", "C:\\file", 0, 3);

  curl_easy_cleanup(curlhandle);
  curl_global_cleanup();

  return 0;
}

@@ -165,8 +165,8 @@ to a CURL_FORMADD_* constant defined in

 /* Add ptrname/ptrcontent section */
 curl_formadd(&post, &last, CURLFORM_PTRNAME, namebuffer,
              CURLFORM_PTRCONTENTS, buffer, CURLFORM_NAMELENGTH,
              namelength, CURLFORM_END);

 /* Add name/ptrcontent/contenttype section */
 curl_formadd(&post, &last, CURLFORM_COPYNAME, "html_code_with_hole",
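The curl_formadd() fragment above uses the pointer-based options (CURLFORM_PTRNAME/CURLFORM_PTRCONTENTS), where the caller keeps the buffers alive. For context, a minimal self-contained sketch of the same formpost API using the copying variants (the URL and field values here are placeholders for illustration):

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl;
      struct curl_httppost *post = NULL;
      struct curl_httppost *last = NULL;

      curl_global_init(CURL_GLOBAL_ALL);

      /* Add a simple name/content section; the strings are copied by libcurl */
      curl_formadd(&post, &last,
                   CURLFORM_COPYNAME, "name",
                   CURLFORM_COPYCONTENTS, "content",
                   CURLFORM_END);

      curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/upload");
        curl_easy_setopt(curl, CURLOPT_HTTPPOST, post);
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      curl_formfree(post);
      curl_global_cleanup();
      return 0;
    }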
@@ -78,30 +78,30 @@ AC_DEFUN([LIBCURL_CHECK_CONFIG],
        AC_PATH_PROG([_libcurl_config],[curl-config],["$withval/bin"],
                     ["$withval/bin"])
     else
        AC_PATH_PROG([_libcurl_config],[curl-config])
     fi

     if test x$_libcurl_config != "x" ; then
        AC_CACHE_CHECK([for the version of libcurl],
           [libcurl_cv_lib_curl_version],
           [libcurl_cv_lib_curl_version=`$_libcurl_config --version | $AWK '{print $[]2}'`])

        _libcurl_version=`echo $libcurl_cv_lib_curl_version | $_libcurl_version_parse`
        _libcurl_wanted=`echo ifelse([$2],,[0],[$2]) | $_libcurl_version_parse`

        if test $_libcurl_wanted -gt 0 ; then
           AC_CACHE_CHECK([for libcurl >= version $2],
              [libcurl_cv_lib_version_ok],
              [
              if test $_libcurl_version -ge $_libcurl_wanted ; then
                 libcurl_cv_lib_version_ok=yes
              else
                 libcurl_cv_lib_version_ok=no
              fi
              ])
        fi

        if test $_libcurl_wanted -eq 0 || test x$libcurl_cv_lib_version_ok = xyes ; then
           if test x"$LIBCURL_CPPFLAGS" = "x" ; then
              LIBCURL_CPPFLAGS=`$_libcurl_config --cflags`
           fi
@@ -109,8 +109,8 @@ AC_DEFUN([LIBCURL_CHECK_CONFIG],
           LIBCURL=`$_libcurl_config --libs`

           # This is so silly, but Apple actually has a bug in their
           # curl-config script. Fixed in Tiger, but there are still
           # lots of Panther installs around.
           case "${host}" in
              powerpc-apple-darwin7*)
                 LIBCURL=`echo $LIBCURL | sed -e 's|-arch i386||g'`
@@ -118,18 +118,18 @@ AC_DEFUN([LIBCURL_CHECK_CONFIG],
           esac
        fi

        # All curl-config scripts support --feature
        _libcurl_features=`$_libcurl_config --feature`

        # Is it modern enough to have --protocols? (7.12.4)
        if test $_libcurl_version -ge 461828 ; then
           _libcurl_protocols=`$_libcurl_config --protocols`
        fi
     else
        _libcurl_try_link=no
     fi

     unset _libcurl_wanted
  fi

  if test $_libcurl_try_link = yes ; then
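The numeric thresholds that libcurl.m4 compares against are libcurl's packed version numbers written in decimal: 0x070C04 (7.12.4, when --protocols appeared) is 461828, 0x070B00 (7.11.0) is 461568 and 0x071400 (7.20.0) is 463872, matching the comments elsewhere in this macro. A small illustrative check of that packing, not part of the macro itself:

    #include <assert.h>

    /* Pack a version triple the same way libcurl's LIBCURL_VERSION_NUM does:
       one byte each for major, minor and patch. */
    static long pack_version(int major, int minor, int patch)
    {
      return ((long)major << 16) | ((long)minor << 8) | (long)patch;
    }

    int main(void)
    {
      assert(pack_version(7, 12, 4) == 461828);  /* --protocols appeared here */
      assert(pack_version(7, 11, 0) == 461568);  /* standards-compliant FTPS */
      assert(pack_version(7, 20, 0) == 463872);  /* RTSP, IMAP, POP3, SMTP */
      return 0;
    }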
@@ -167,8 +167,8 @@ x=CURLOPT_VERBOSE;

     if test $libcurl_cv_lib_curl_usable = yes ; then

        # Does curl_free() exist in this version of libcurl?
        # If not, fake it with free()

        _libcurl_save_cppflags=$CPPFLAGS
        CPPFLAGS="$CPPFLAGS $LIBCURL_CPPFLAGS"
@@ -176,8 +176,8 @@ x=CURLOPT_VERBOSE;
        LIBS="$LIBS $LIBCURL"

        AC_CHECK_FUNC(curl_free,,
           AC_DEFINE(curl_free,free,
              [Define curl_free() as free() if our version of curl lacks curl_free.]))

        CPPFLAGS=$_libcurl_save_cppflags
        LIBS=$_libcurl_save_libs
@@ -190,40 +190,40 @@ x=CURLOPT_VERBOSE;
  AC_SUBST(LIBCURL)

  for _libcurl_feature in $_libcurl_features ; do
     AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_feature_$_libcurl_feature),[1])
     eval AS_TR_SH(libcurl_feature_$_libcurl_feature)=yes
  done

  if test "x$_libcurl_protocols" = "x" ; then

     # We don't have --protocols, so just assume that all
     # protocols are available
     _libcurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP"

     if test x$libcurl_feature_SSL = xyes ; then
        _libcurl_protocols="$_libcurl_protocols HTTPS"

        # FTPS wasn't standards-compliant until version
        # 7.11.0 (0x070b00 == 461568)
        if test $_libcurl_version -ge 461568; then
           _libcurl_protocols="$_libcurl_protocols FTPS"
        fi
     fi

     # RTSP, IMAP, POP3 and SMTP were added in
     # 7.20.0 (0x071400 == 463872)
     if test $_libcurl_version -ge 463872; then
        _libcurl_protocols="$_libcurl_protocols RTSP IMAP POP3 SMTP"
     fi
  fi

  for _libcurl_protocol in $_libcurl_protocols ; do
     AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_protocol_$_libcurl_protocol),[1])
     eval AS_TR_SH(libcurl_protocol_$_libcurl_protocol)=yes
  done
  else
     unset LIBCURL
     unset LIBCURL_CPPFLAGS
  fi
  fi

@@ -1,5 +1,5 @@

                    Content Encoding Support for libcurl

* About content encodings:

@@ -110,7 +110,7 @@ options:
                      CURLOPT_RTSP_STREAM_URI
                      CURLOPT_RTSP_TRANSPORT
                      CURLOPT_SOCKS5_GSSAPI_SERVICE
                      CURLOPT_MAIL_FROM
 Else it is the same as for curl_easy_setopt().
 Note that CURLOPT_ERRORBUFFER is not in the list above, since it gives the
 address of an (empty) character buffer, not the address of a string.
@@ -12,7 +12,7 @@ line or use the curl interface.

The latest release of Curl_easy, a Perl interface to curl is available from

    http://curl.haxx.se/libcurl/perl/

(Georg Horn's original version of Curl_easy, supporting curl versions
before 7.7 is still available from: http://www.koblenz-net.de/~horn/export/ )
@@ -88,32 +88,32 @@ sub SplitURL {
    my $inurl = $_[0];

    if($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = $3;
        $getdocument = $4;
    }
    elsif ($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = $3;
        $getdocument = "";

        if($getpath !~ /\//) {
            $getpath ="";
            $getdocument = $3;
        }

    }
    elsif ($inurl=~ /^([^:]+):\/\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = "";
        $getdocument = "";
    }
    else {
        print "Couldn't parse the specified URL, retry please!\n";
        exit;
    }
}

@@ -127,52 +127,52 @@ sub SplitURL {

if(!$usestdin) {
    open(HEADGET, "$linkcheck $geturl|") ||
        die "Couldn't get web page for some reason";
  headget:
    while(<HEADGET>) {
        # print $_;
        if($_ =~ /HTTP\/.*3\d\d /) {
            $pagemoved=1;
        }
        elsif($pagemoved &&
              ($_ =~ /^Location: (.*)/)) {
            $geturl = $1;

            &SplitURL($geturl);

            $pagemoved++;
            last headget;
        }
    }
    close(HEADGET);

    if($pagemoved == 1) {
        print "Page is moved but we don't know where. Did you forget the ",
            "traling slash?\n";
        exit;
    }

    open(WEBGET, "$htmlget $geturl|") ||
        die "Couldn't get web page for some reason";

    while(<WEBGET>) {
        $line = $_;
        push @indoc, $line;
        $line=~ s/\n//g;
        $line=~ s/\r//g;
        # print $line."\n";
        $in=$in.$line;
    }

    close(WEBGET);
}
else {
    while(<STDIN>) {
        $line = $_;
        push @indoc, $line;
        $line=~ s/\n//g;
        $line=~ s/\r//g;
        $in=$in.$line;
    }
}

@@ -193,21 +193,21 @@ sub LinkWorks {

  boo:
    if( $doc[0] =~ /^HTTP[^ ]+ (\d+)/ ) {
        $error = $1;

        if($error < 400 ) {
            return "GOOD";
        }
        else {

            if($head && ($error >= 500)) {
                # This server doesn't like HEAD!
                @doc = `$linkcheckfull \"$check\"`;
                $head = 0;
                goto boo;
            }
            return "BAD";
        }
    }
    return "BAD";
}
@@ -219,43 +219,43 @@ sub GetLinks {

  getlinkloop:
    while($in =~ /[^<]*(<[^>]+>)/g ) {
        # we have a tag in $1
        $tag = $1;

        if($tag =~ /^<!--/) {
            # this is a comment tag, ignore it
        }
        else {
            if($tag =~ /(src|href|background|archive) *= *(\"[^\"]\"|[^ )>]*)/i) {
                $url=$2;
                if($url =~ /^\"(.*)\"$/) {
                    # this was a "string" now $1 has removed the quotes:
                    $url=$1;
                }


                $url =~ s/([^\#]*)\#.*/$1/g;

                if($url eq "") {
                    # if the link was nothing than a #-link it may now have
                    # been emptied completely so then we skip the rest
                    next getlinkloop;
                }

                if($done{$url}) {
                    # if this url already is done, do next
                    $done{$url}++;
                    next getlinkloop;
                }

                $done{$url} = 1; # this is "done"

                push @result, $url;
                if($tag =~ /< *([^ ]+)/) {
                    # print "TAG: $1\n";
                    $tagtype{$url}=$1;
                }
            }
        }
    }
    return @result;
@@ -268,36 +268,36 @@ for(@links) {
    $url = $_;

    if($url =~ /^([^:]+):/) {
        $prot = $1;
#        if($prot !~ /(http|ftp)/i) {
        if($prot !~ /http/i) {
            # this is an unsupported protocol, we ignore this
            next linkloop;
        }
        $link = $url;
    }
    else {
        if($external) {
            next linkloop;
        }

        # this is a link on the save server:
        if($url =~ /^\//) {
            # from root
            $link = "$getprotocol://$getserver$url";
        }
        else {
            # from the scanned page's dir
            $nyurl=$url;

            if(length($getpath) &&
               ($getpath !~ /\/$/) &&
               ($nyurl !~ /^\//)) {
                # lacks ending slash, add one to the document part:
                $nyurl = "/".$nyurl;
            }
            $link = "$getprotocol://$getserver/$getpath$nyurl";
        }
    }

    #print "test $link\n";
@@ -313,16 +313,16 @@ for(@links) {

    # If bad and -l, present the line numbers of the usage
    if("BAD" eq $success) {
        $badlinks++;
        if($linenumber) {
            $line =1;
            for(@indoc) {
                if($_ =~ /$url/) {
                    print " line $line\n";
                }
                $line++;
            }
        }
    }

}
@@ -330,7 +330,7 @@ for(@links) {
if($verbose) {
    print "$allcount links were checked";
    if($badlinks > 0) {
        print ", $badlinks were found bad";
    }
    print "\n";
}
@@ -125,32 +125,32 @@ my $badlinks=0;
sub SplitURL {
    my $inurl = $_[0];
    if($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = $3;
        $getdocument = $4;
    }
    elsif ($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = $3;
        $getdocument = "";

        if($getpath !~ /\//) {
            $getpath ="";
            $getdocument = $3;
        }

    }
    elsif ($inurl=~ /^([^:]+):\/\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = "";
        $getdocument = "";
    }
    else {
        print "Couldn't parse the specified URL, retry please!\n";
        exit;
    }
}

@@ -164,35 +164,35 @@ sub GetRootPage {

    my $pagemoved=0;
    open(HEADGET, "$linkcheck $geturl|") ||
        die "Couldn't get web page for some reason";

    while(<HEADGET>) {
        #print STDERR $_;
        if($_ =~ /HTTP\/1\.[01] (\d\d\d) /) {
            $code=$1;
            if($code =~ /^3/) {
                $pagemoved=1;
            }
        }
        elsif($_ =~ /^Content-Type: ([\/a-zA-Z]+)/) {
            $type=$1;
        }
        elsif($pagemoved &&
              ($_ =~ /^Location: (.*)/)) {
            $geturl = $1;

            &SplitURL($geturl);

            $pagemoved++;
            last;
        }
    }
    close(HEADGET);

    if($pagemoved == 1) {
        print "Page is moved but we don't know where. Did you forget the ",
            "traling slash?\n";
        exit;
    }

    if($type ne "text/html") {
@@ -229,21 +229,21 @@ sub LinkWorks {

  boo:
    if( $doc[0] =~ /^HTTP[^ ]+ (\d+)/ ) {
        my $error = $1;

        if($error < 400 ) {
            return "GOOD";
        }
        else {

            if($head && ($error >= 500)) {
                # This server doesn't like HEAD!
                @doc = `$linkcheckfull \"$check\"`;
                $head = 0;
                goto boo;
            }
            return "BAD";
        }
    }
    return "BAD";
}
@@ -254,45 +254,45 @@ sub GetLinks {
    my @result;

    while($in =~ /[^<]*(<[^>]+>)/g ) {
        # we have a tag in $1
        my $tag = $1;

        if($tag =~ /^<!--/) {
            # this is a comment tag, ignore it
        }
        else {
            if($tag =~ /(src|href|background|archive) *= *(\"[^\"]\"|[^ \)>]*)/i) {
                my $url=$2;
                if($url =~ /^\"(.*)\"$/) {
                    # this was a "string" now $1 has removed the quotes:
                    $url=$1;
                }


                $url =~ s/([^\#]*)\#.*/$1/g;

                if($url eq "") {
                    # if the link was nothing than a #-link it may now have
                    # been emptied completely so then we skip the rest
                    next;
                }

                if($done{$url}) {
                    # if this url already is done, do next
                    $done{$url}++;
                    if($verbose) {
                        print " FOUND $url but that is already checked\n";
                    }
                    next;
                }

                $done{$url} = 1; # this is "done"

                push @result, $url;
                if($tag =~ /< *([^ ]+)/) {
                    $tagtype{$url}=$1;
                }
            }
        }
    }
    return @result;
@@ -437,7 +437,7 @@ while(1) {
    if($verbose) {
        print "$allcount links were checked";
        if($badlinks > 0) {
            print ", $badlinks were found bad";
        }
        print "\n";
    }

@@ -74,7 +74,7 @@ while(<STDIN>) {
while($in =~ /[^<]*(<[^>]+>)/g ) {
    # we have a tag in $1
    $tag = $1;

    if($tag =~ /^<!--/) {
        # this is a comment tag, ignore it
    }
@@ -99,7 +99,7 @@ while($in =~ /[^<]*(<[^>]+>)/g ) {

        if($enctype eq "multipart/form-data") {
            $enctype="multipart form upload [use -F]"
        }
        $enctype = "\n--- type: $enctype";
    }
    else {
|
||||
$mtag = $1;
|
||||
|
||||
($name, $value)=namevalue($tag);
|
||||
|
||||
|
||||
if($mtag =~ /select/i) {
|
||||
print "Select: NAME=\"$name\"\n";
|
||||
push @vars, "$name";
|
||||
|
@@ -80,32 +80,32 @@ sub SplitURL {
    my $inurl = $_[0];

    if($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = $3;
        $getdocument = $4;
    }
    elsif ($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = $3;
        $getdocument = "";

        if($getpath !~ /\//) {
            $getpath ="";
            $getdocument = $3;
        }

    }
    elsif ($inurl=~ /^([^:]+):\/\/(.*)/ ) {
        $getprotocol = $1;
        $getserver = $2;
        $getpath = "";
        $getdocument = "";
    }
    else {
        print "Couldn't parse the specified URL, retry please!\n";
        exit;
    }
}

@@ -119,52 +119,52 @@ sub SplitURL {

if(!$usestdin) {
    open(HEADGET, "$linkcheck $geturl|") ||
        die "Couldn't get web page for some reason";
  headget:
    while(<HEADGET>) {
        # print $_;
        if($_ =~ /HTTP\/.*3\d\d /) {
            $pagemoved=1;
        }
        elsif($pagemoved &&
              ($_ =~ /^Location: (.*)/)) {
            $geturl = $1;

            &SplitURL($geturl);

            $pagemoved++;
            last headget;
        }
    }
    close(HEADGET);

    if($pagemoved == 1) {
        print "Page is moved but we don't know where. Did you forget the ",
            "traling slash?\n";
        exit;
    }

    open(WEBGET, "$htmlget $geturl|") ||
        die "Couldn't get web page for some reason";

    while(<WEBGET>) {
        $line = $_;
        push @indoc, $line;
        $line=~ s/\n//g;
        $line=~ s/\r//g;
        # print $line."\n";
        $in=$in.$line;
    }

    close(WEBGET);
}
else {
    while(<STDIN>) {
        $line = $_;
        push @indoc, $line;
        $line=~ s/\n//g;
        $line=~ s/\r//g;
        $in=$in.$line;
    }
}

@@ -174,43 +174,43 @@ sub GetLinks {

  getlinkloop:
    while($in =~ /[^<]*(<[^>]+>)/g ) {
        # we have a tag in $1
        $tag = $1;

        if($tag =~ /^<!--/) {
            # this is a comment tag, ignore it
        }
        else {
            if($tag =~ /(src|href|background|archive) *= *(\"[^\"]\"|[^ )>]*)/i) {
                $url=$2;
                if($url =~ /^\"(.*)\"$/) {
                    # this was a "string" now $1 has removed the quotes:
                    $url=$1;
                }


                $url =~ s/([^\#]*)\#.*/$1/g;

                if($url eq "") {
                    # if the link was nothing than a #-link it may now have
                    # been emptied completely so then we skip the rest
                    next getlinkloop;
                }

                if($done{$url}) {
                    # if this url already is done, do next
                    $done{$url}++;
                    next getlinkloop;
                }

                $done{$url} = 1; # this is "done"

                push @result, $url;
                if($tag =~ /< *([^ ]+)/) {
                    # print "TAG: $1\n";
                    $tagtype{$url}=$1;
                }
            }
        }
    }
    return @result;
@@ -223,38 +223,38 @@ for(@links) {
    $url = $_;

    if($url =~ /^([^:]+):/) {
        $link = $url;
    }
    else {
        # this is an absolute link on the same server:
        if($url =~ /^\//) {
            # from root
            $link = "$getprotocol://$getserver$url";
        }
        else {
            # from the scanned page's dir
            $nyurl=$url;

            if(length($getpath) &&
               ($getpath !~ /\/$/) &&
               ($nyurl !~ /^\//)) {
                # lacks ending slash, add one to the document part:
                $nyurl = "/".$nyurl;
            }
            $link = "$getprotocol://$getserver/$getpath$nyurl";
        }
    }

    if($link =~ /$getregex/) {
        if($display) {
            print "$link\n";
        }
        else {
            if($verbose) {
                print "Gets $link\n";
            }
            print `$urlget $link`;
        }
    }


@@ -51,7 +51,7 @@ for(@all) {
        }
        print "Recursing for dir $dir$name in target $target/$name\n";

        $nextdepth=$maxdepth-1;
        print `$0 $dir$name/ $target/$name $nextdepth`;
    }
    elsif($firstletter eq "-") {
@@ -20,10 +20,10 @@ if($README eq "") {
}

push @out, "                                  _   _ ____  _\n";
push @out, "  Project                     ___| | | |  _ \\| |\n";
push @out, "                             / __| | | | |_) | |\n";
push @out, "                            | (__| |_| |  _ <| |___\n";
push @out, "                             \\___|\\___/|_| \\_\\_____|\n";

my $olen=0;
@@ -35,8 +35,8 @@ while (<STDIN>) {

    if($line =~ /^([ \t]*\n|curl)/i) {
        # cut off headers and empty lines
        $wline++; # count number of cut off lines
        next;
    }

    my $text = $line;
@@ -57,8 +57,8 @@ while (<STDIN>) {
    $olen = $tlen;

    if($wline) {
        # we only make one empty line max
        $wline = 0;
        push @out, "\n";
    }
    push @out, $line;
@@ -174,12 +174,12 @@ void hugehelp(void)
    z.next_out = buf;
    status = inflate(&z, Z_SYNC_FLUSH);
    if (status == Z_OK || status == Z_STREAM_END) {
      fwrite(buf, BUF_SIZE - z.avail_out, 1, stdout);
      if (status == Z_STREAM_END)
        break;
    }
    else
      break;    /* Error */
  }
  free(buf);
}
@@ -214,7 +214,7 @@ sub showdiff {
    my @out = `diff -u $file2 $file1 2>/dev/null`;

    if(!$out[0]) {
        @out = `diff -c $file2 $file1 2>/dev/null`;
    }

    return @out;
@@ -3,8 +3,8 @@
# curl --version protocols/features
if ( $#ARGV != 2 )
{
    print "Usage: $0 curl-config-script curl-version-output-file features|protocols\n";
    exit 3;
}

my $what=$ARGV[2];
|
||||
|
||||
my $different = $curlproto ne $curlconfigproto;
|
||||
if ($different) {
|
||||
print "Mismatch in $what lists:\n";
|
||||
print "curl: $curlproto\n";
|
||||
print "curl-config: $curlconfigproto\n";
|
||||
print "Mismatch in $what lists:\n";
|
||||
print "curl: $curlproto\n";
|
||||
print "curl-config: $curlconfigproto\n";
|
||||
}
|
||||
exit $different;
|
||||
|
@@ -2,8 +2,8 @@
# Determine if curl-config --version matches the curl --version
if ( $#ARGV != 2 )
{
    print "Usage: $0 curl-config-script curl-version-output-file version|vernum\n";
    exit 3;
}

my $what=$ARGV[2];
@@ -23,23 +23,23 @@ open(CURLCONFIG, "sh $ARGV[0] --$what|") || die "Can't get curl-config --$what l
$_ = <CURLCONFIG>;
chomp;
if ( $what eq "version" ) {
    /^libcurl ([\.\d]+(-CVS)?)$/ ;
    $curlconfigversion = $1;
}
else {
    # Convert hex version to decimal for comparison's sake
    /^(..)(..)(..)$/ ;
    $curlconfigversion = hex($1) . "." . hex($2) . "." . hex($3);

    # Strip off the -CVS from the curl version if it's there
    $version =~ s/-CVS$//;
}
close CURLCONFIG;

my $different = $version ne $curlconfigversion;
if ($different || !$version) {
    print "Mismatch in --version:\n";
    print "curl: $version\n";
    print "curl-config: $curlconfigversion\n";
    exit 1;
}
@@ -2,13 +2,13 @@
# Determine if the given curl executable supports the 'openssl' SSL engine
if ( $#ARGV != 0 )
{
    print "Usage: $0 curl-executable\n";
    exit 3;
}
if (!open(CURL, "@ARGV[0] -s --engine list|"))
{
    print "Can't get SSL engine list\n";
    exit 2;
}
while( <CURL> )
{
@@ -2,32 +2,32 @@
# Perform simple file and directory manipulation in a portable way
if ( $#ARGV <= 0 )
{
    print "Usage: $0 mkdir|rmdir|rm|move|gone path1 [path2] [more commands...]\n";
    exit 1;
}

use File::Copy;
while(@ARGV) {
    my $cmd = shift @ARGV;
    my $arg = shift @ARGV;
    if ($cmd eq "mkdir") {
        mkdir $arg || die "$!";
    }
    elsif ($cmd eq "rmdir") {
        rmdir $arg || die "$!";
    }
    elsif ($cmd eq "rm") {
        unlink $arg || die "$!";
    }
    elsif ($cmd eq "move") {
        my $arg2 = shift @ARGV;
        move($arg,$arg2) || die "$!";
    }
    elsif ($cmd eq "gone") {
        ! -e $arg || die "Path $arg exists";
    } else {
        print "Unsupported command $cmd\n";
        exit 1;
    }
}
exit 0;
@@ -4,102 +4,102 @@ use Time::Local;

if ( $#ARGV < 1 )
{
    print "Usage: $0 prepare|postprocess dir [logfile]\n";
    exit 1;
}

# <precheck> expects an error message on stdout
sub errout {
    print $_[0] . "\n";
    exit 1;
}

if ($ARGV[0] eq "prepare")
{
    my $dirname = $ARGV[1];
    mkdir $dirname || errout "$!";
    chdir $dirname;

    # Create the files in alphabetical order, to increase the chances
    # of receiving a consistent set of directory contents regardless
    # of whether the server alphabetizes the results or not.
    mkdir "asubdir" || errout "$!";
    chmod 0777, "asubdir";

    open(FILE, ">plainfile.txt") || errout "$!";
    binmode FILE;
    print FILE "Test file to support curl test suite\n";
    close(FILE);
    utime time, timegm(0,0,12,1,0,100), "plainfile.txt";
    chmod 0666, "plainfile.txt";

    open(FILE, ">rofile.txt") || errout "$!";
    binmode FILE;
    print FILE "Read-only test file to support curl test suite\n";
    close(FILE);
    utime time, timegm(0,0,12,31,11,100), "rofile.txt";
    chmod 0444, "rofile.txt";

    exit 0;
}
elsif ($ARGV[0] eq "postprocess")
{
    my $dirname = $ARGV[1];
    my $logfile = $ARGV[2];

    # Clean up the test directory
    unlink "$dirname/rofile.txt";
    unlink "$dirname/plainfile.txt";
    rmdir "$dirname/asubdir";

    rmdir $dirname || die "$!";

    if ($logfile) {
        # Process the directory file to remove all information that
        # could be inconsistent from one test run to the next (e.g.
        # file date) or may be unsupported on some platforms (e.g.
        # Windows). Also, since 7.17.0, the sftp directory listing
        # format can be dependent on the server (with a recent
        # enough version of libssh2) so this script must also
        # canonicalize the format. Here are examples of the general
        # format supported:
        # -r--r--r-- 12 ausername grp 47 Dec 31 2000 rofile.txt
        # -r--r--r-- 1 1234 4321 47 Dec 31 2000 rofile.txt
        # The "canonical" format is similar to the first (which is
        # the one generated on a typical Linux installation):
        # -r-?r-?r-? 12 U U 47 Dec 31 2000 rofile.txt

        my @canondir;
        open(IN, "<$logfile") || die "$!";
        while (<IN>) {
            /^(.)(..).(..).(..).\s*(\S+)\s+\S+\s+\S+\s+(\S+)\s+(\S+\s+\S+\s+\S+)(.*)$/;
            if ($1 eq "d") {
                # Erase all directory metadata except for the name, as it is not
                # consistent for across all test systems and filesystems
                push @canondir, "d????????? N U U N ??? N NN:NN$8\n";
            } elsif ($1 eq "-") {
                # Erase user and group names, as they are not consistent across
                # all test systems
                my $line = sprintf("%s%s?%s?%s?%5d U U %15d %s%s\n", $1,$2,$3,$4,$5,$6,$7,$8);
                push @canondir, $line;
            } else {
                # Unexpected format; just pass it through and let the test fail
                push @canondir, $_;
            }
        }
        close(IN);

        @canondir = sort {substr($a,57) cmp substr($b,57)} @canondir;
        my $newfile = $logfile . ".new";
        open(OUT, ">$newfile") || die "$!";
        print OUT join('', @canondir);
        close(OUT);

        unlink $logfile;
        rename $newfile, $logfile;
    }

    exit 0;
}
print "Unsupported command $ARGV[0]\n";
exit 1;
@@ -2,12 +2,12 @@
# Check that the length of a given URL is correct
if ( $#ARGV != 1 )
{
    print "Usage: $0 string length\n";
    exit 3;
}
if (length(@ARGV[0]) != @ARGV[1])
{
    print "Given host IP and port not supported\n";
    exit 1;
}
exit 0;
@@ -117,7 +117,7 @@ while(<FILE>) {
        if($sizeataddr{$addr}>0) {
            # this means weeeeeirdo
            print "Mixed debug compile ($source:$linenum at line $lnum), rebuild curl now\n";
            print "We think $sizeataddr{$addr} bytes are already allocated at that memory address: $addr!\n";
        }

        $sizeataddr{$addr}=$size;
@@ -749,7 +749,7 @@ sub verifyftp {
    unlink($verifylog) if(-f $verifylog);

    if($proto eq "ftps") {
        $extra .= "--insecure --ftp-ssl-control ";
    }
    elsif($proto eq "smtp") {
        # SMTP is a bit different since it requires more options and it