Mirror of https://github.com/moparisthebest/curl

commit 46b112bcd4 (parent f442dd6496)

    replaced tabs with spaces
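The change itself is purely mechanical whitespace cleanup. For illustration only, a conversion of this kind can be scripted in a few lines of Perl; the sketch below is an assumption about how one might do it (it is not the tool used for this commit), with a tab stop of 8 and in-place rewriting of the files named on the command line:

#!/usr/bin/env perl
# Rough sketch only -- NOT the script used for this commit. It expands tabs
# to spaces (tab stop of 8 assumed) and strips trailing whitespace, rewriting
# each file named on the command line in place.
use strict;
use warnings;
use Text::Tabs;    # core module providing expand() and $tabstop

$tabstop = 8;      # assumed tab width

for my $file (@ARGV) {
    open(my $in, '<', $file) or die "read $file: $!";
    my @lines = expand(<$in>);   # tabs -> spaces
    close($in);

    s/[ \t]+$// for @lines;      # drop trailing whitespace

    open(my $out, '>', $file) or die "write $file: $!";
    print $out @lines;
    close($out);
}

It would be run as: perl detab.pl FILE... (the name detab.pl is just a placeholder).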
@@ -503,7 +503,7 @@ main ()

/* IoctlSocket source code */
if(0 != IoctlSocket(0, 0, 0))
return 1;
;
return 0;
}
@@ -6,12 +6,12 @@
# CARES_FOUND, If false, do not try to use c-ares.
# also defined, but not for general use are
# CARES_LIBRARY, where to find the c-ares library.

FIND_PATH(CARES_INCLUDE_DIR ares.h
/usr/local/include
/usr/include
)

SET(CARES_NAMES ${CARES_NAMES} cares)
FIND_LIBRARY(CARES_LIBRARY
NAMES ${CARES_NAMES}
docs/INSTALL (12 changed lines)
@@ -672,8 +672,8 @@ eCos
of running curl in this way is the contents of the configuration file
printed to the console.

--- src/main.c 19 Jul 2006 19:09:56 -0000 1.363
+++ src/main.c 24 Jul 2006 21:37:23 -0000
@@ -4286,11 +4286,31 @@
}

@@ -808,10 +808,10 @@ CROSS COMPILE
export NM=ppc_405-nm

./configure --target=powerpc-hardhat-linux \
--host=powerpc-hardhat-linux \
--build=i586-pc-linux-gnu \
--prefix=/opt/hardhat/devkit/ppc/405/target/usr/local \
--exec-prefix=/usr/local

(end script)

@@ -46,7 +46,7 @@ SIMPLE USAGE
Get a file from an SSH server using SCP using a private key to authenticate:

curl -u username: --key ~/.ssh/id_dsa --pubkey ~/.ssh/id_dsa.pub \
scp://shell.example.com/~/personal.txt

Get the main page from an IPv6 web server:

@@ -136,7 +136,7 @@ int main(int argc, char **argv)
/* and give the size of the upload, this supports large file sizes
on systems that have general support for it */
curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE,
(curl_off_t)file_info.st_size);

/* tell libcurl we can use "any" auth, which lets the lib pick one, but it
also costs one extra round-trip and possibly sending of all the PUT
@@ -34,126 +34,126 @@ int __cdecl _snscanf(const char * input, size_t length, const char * format, ...

/* parse headers for Content-Length */
size_t getcontentlengthfunc(void *ptr, size_t size, size_t nmemb, void *stream) {
int r;
long len = 0;

/* _snscanf() is Win32 specific */
r = _snscanf(ptr, size * nmemb, "Content-Length: %ld\n", &len);

if (r) /* Microsoft: we don't read the specs */
*((long *) stream) = len;

return size * nmemb;
}

/* discard downloaded data */
size_t discardfunc(void *ptr, size_t size, size_t nmemb, void *stream) {
return size * nmemb;
}

/* read data to upload */
size_t readfunc(void *ptr, size_t size, size_t nmemb, void *stream)
{
FILE *f = stream;
size_t n;

if (ferror(f))
return CURL_READFUNC_ABORT;

n = fread(ptr, size, nmemb, f) * size;

return n;
}


int upload(CURL *curlhandle, const char * remotepath, const char * localpath,
long timeout, long tries)
{
FILE *f;
long uploaded_len = 0;
CURLcode r = CURLE_GOT_NOTHING;
int c;

f = fopen(localpath, "rb");
if (f == NULL) {
perror(NULL);
return 0;
}

curl_easy_setopt(curlhandle, CURLOPT_UPLOAD, 1L);

curl_easy_setopt(curlhandle, CURLOPT_URL, remotepath);

if (timeout)
curl_easy_setopt(curlhandle, CURLOPT_FTP_RESPONSE_TIMEOUT, timeout);

curl_easy_setopt(curlhandle, CURLOPT_HEADERFUNCTION, getcontentlengthfunc);
curl_easy_setopt(curlhandle, CURLOPT_HEADERDATA, &uploaded_len);

curl_easy_setopt(curlhandle, CURLOPT_WRITEFUNCTION, discardfunc);

curl_easy_setopt(curlhandle, CURLOPT_READFUNCTION, readfunc);
curl_easy_setopt(curlhandle, CURLOPT_READDATA, f);

curl_easy_setopt(curlhandle, CURLOPT_FTPPORT, "-"); /* disable passive mode */
curl_easy_setopt(curlhandle, CURLOPT_FTP_CREATE_MISSING_DIRS, 1L);

curl_easy_setopt(curlhandle, CURLOPT_VERBOSE, 1L);

for (c = 0; (r != CURLE_OK) && (c < tries); c++) {
/* are we resuming? */
if (c) { /* yes */
/* determine the length of the file already written */

/*
* With NOBODY and NOHEADER, libcurl will issue a SIZE
* command, but the only way to retrieve the result is
* to parse the returned Content-Length header. Thus,
* getcontentlengthfunc(). We need discardfunc() above
* because HEADER will dump the headers to stdout
* without it.
*/
curl_easy_setopt(curlhandle, CURLOPT_NOBODY, 1L);
curl_easy_setopt(curlhandle, CURLOPT_HEADER, 1L);

r = curl_easy_perform(curlhandle);
if (r != CURLE_OK)
continue;

curl_easy_setopt(curlhandle, CURLOPT_NOBODY, 0L);
curl_easy_setopt(curlhandle, CURLOPT_HEADER, 0L);

fseek(f, uploaded_len, SEEK_SET);

curl_easy_setopt(curlhandle, CURLOPT_APPEND, 1L);
}
else { /* no */
curl_easy_setopt(curlhandle, CURLOPT_APPEND, 0L);
}

r = curl_easy_perform(curlhandle);
}

fclose(f);

if (r == CURLE_OK)
return 1;
else {
fprintf(stderr, "%s\n", curl_easy_strerror(r));
return 0;
}
}

int main(int c, char **argv) {
CURL *curlhandle = NULL;

curl_global_init(CURL_GLOBAL_ALL);
curlhandle = curl_easy_init();

upload(curlhandle, "ftp://user:pass@host/path/file", "C:\\file", 0, 3);

curl_easy_cleanup(curlhandle);
curl_global_cleanup();

return 0;
}
@@ -165,8 +165,8 @@ to a CURL_FORMADD_* constant defined in

/* Add ptrname/ptrcontent section */
curl_formadd(&post, &last, CURLFORM_PTRNAME, namebuffer,
CURLFORM_PTRCONTENTS, buffer, CURLFORM_NAMELENGTH,
namelength, CURLFORM_END);

/* Add name/ptrcontent/contenttype section */
curl_formadd(&post, &last, CURLFORM_COPYNAME, "html_code_with_hole",
@@ -78,30 +78,30 @@ AC_DEFUN([LIBCURL_CHECK_CONFIG],
AC_PATH_PROG([_libcurl_config],[curl-config],["$withval/bin"],
["$withval/bin"])
else
AC_PATH_PROG([_libcurl_config],[curl-config])
fi

if test x$_libcurl_config != "x" ; then
AC_CACHE_CHECK([for the version of libcurl],
[libcurl_cv_lib_curl_version],
[libcurl_cv_lib_curl_version=`$_libcurl_config --version | $AWK '{print $[]2}'`])

_libcurl_version=`echo $libcurl_cv_lib_curl_version | $_libcurl_version_parse`
_libcurl_wanted=`echo ifelse([$2],,[0],[$2]) | $_libcurl_version_parse`

if test $_libcurl_wanted -gt 0 ; then
AC_CACHE_CHECK([for libcurl >= version $2],
[libcurl_cv_lib_version_ok],
[
if test $_libcurl_version -ge $_libcurl_wanted ; then
libcurl_cv_lib_version_ok=yes
else
libcurl_cv_lib_version_ok=no
fi
])
fi

if test $_libcurl_wanted -eq 0 || test x$libcurl_cv_lib_version_ok = xyes ; then
if test x"$LIBCURL_CPPFLAGS" = "x" ; then
LIBCURL_CPPFLAGS=`$_libcurl_config --cflags`
fi
@@ -109,8 +109,8 @@ AC_DEFUN([LIBCURL_CHECK_CONFIG],
LIBCURL=`$_libcurl_config --libs`

# This is so silly, but Apple actually has a bug in their
# curl-config script.  Fixed in Tiger, but there are still
# lots of Panther installs around.
case "${host}" in
powerpc-apple-darwin7*)
LIBCURL=`echo $LIBCURL | sed -e 's|-arch i386||g'`
@@ -118,18 +118,18 @@ AC_DEFUN([LIBCURL_CHECK_CONFIG],
esac
fi

# All curl-config scripts support --feature
_libcurl_features=`$_libcurl_config --feature`

# Is it modern enough to have --protocols? (7.12.4)
if test $_libcurl_version -ge 461828 ; then
_libcurl_protocols=`$_libcurl_config --protocols`
fi
else
_libcurl_try_link=no
fi

unset _libcurl_wanted
fi

if test $_libcurl_try_link = yes ; then
@@ -167,8 +167,8 @@ x=CURLOPT_VERBOSE;

if test $libcurl_cv_lib_curl_usable = yes ; then

# Does curl_free() exist in this version of libcurl?
# If not, fake it with free()

_libcurl_save_cppflags=$CPPFLAGS
CPPFLAGS="$CPPFLAGS $LIBCURL_CPPFLAGS"
@@ -176,8 +176,8 @@ x=CURLOPT_VERBOSE;
LIBS="$LIBS $LIBCURL"

AC_CHECK_FUNC(curl_free,,
AC_DEFINE(curl_free,free,
[Define curl_free() as free() if our version of curl lacks curl_free.]))

CPPFLAGS=$_libcurl_save_cppflags
LIBS=$_libcurl_save_libs
@@ -190,40 +190,40 @@ x=CURLOPT_VERBOSE;
AC_SUBST(LIBCURL)

for _libcurl_feature in $_libcurl_features ; do
AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_feature_$_libcurl_feature),[1])
eval AS_TR_SH(libcurl_feature_$_libcurl_feature)=yes
done

if test "x$_libcurl_protocols" = "x" ; then

# We don't have --protocols, so just assume that all
# protocols are available
_libcurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP"

if test x$libcurl_feature_SSL = xyes ; then
_libcurl_protocols="$_libcurl_protocols HTTPS"

# FTPS wasn't standards-compliant until version
# 7.11.0 (0x070b00 == 461568)
if test $_libcurl_version -ge 461568; then
_libcurl_protocols="$_libcurl_protocols FTPS"
fi
fi

# RTSP, IMAP, POP3 and SMTP were added in
# 7.20.0 (0x071400 == 463872)
if test $_libcurl_version -ge 463872; then
_libcurl_protocols="$_libcurl_protocols RTSP IMAP POP3 SMTP"
fi
fi

for _libcurl_protocol in $_libcurl_protocols ; do
AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_protocol_$_libcurl_protocol),[1])
eval AS_TR_SH(libcurl_protocol_$_libcurl_protocol)=yes
done
else
unset LIBCURL
unset LIBCURL_CPPFLAGS
fi
fi

@@ -1,5 +1,5 @@

Content Encoding Support for libcurl

* About content encodings:

@@ -110,7 +110,7 @@ options:
CURLOPT_RTSP_STREAM_URI
CURLOPT_RTSP_TRANSPORT
CURLOPT_SOCKS5_GSSAPI_SERVICE
CURLOPT_MAIL_FROM
Else it is the same as for curl_easy_setopt().
Note that CURLOPT_ERRORBUFFER is not in the list above, since it gives the
address of an (empty) character buffer, not the address of a string.
@@ -12,7 +12,7 @@ line or use the curl interface.

The latest release of Curl_easy, a Perl interface to curl is available from

http://curl.haxx.se/libcurl/perl/

(Georg Horn's original version of Curl_easy, supporting curl versions
before 7.7 is still available from: http://www.koblenz-net.de/~horn/export/ )
@@ -88,32 +88,32 @@ sub SplitURL {
my $inurl = $_[0];

if($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = $3;
$getdocument = $4;
}
elsif ($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = $3;
$getdocument = "";

if($getpath !~ /\//) {
$getpath ="";
$getdocument = $3;
}

}
elsif ($inurl=~ /^([^:]+):\/\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = "";
$getdocument = "";
}
else {
print "Couldn't parse the specified URL, retry please!\n";
exit;
}
}

@@ -127,52 +127,52 @@ sub SplitURL {

if(!$usestdin) {
open(HEADGET, "$linkcheck $geturl|") ||
die "Couldn't get web page for some reason";
headget:
while(<HEADGET>) {
# print $_;
if($_ =~ /HTTP\/.*3\d\d /) {
$pagemoved=1;
}
elsif($pagemoved &&
($_ =~ /^Location: (.*)/)) {
$geturl = $1;

&SplitURL($geturl);

$pagemoved++;
last headget;
}
}
close(HEADGET);

if($pagemoved == 1) {
print "Page is moved but we don't know where. Did you forget the ",
"traling slash?\n";
exit;
}

open(WEBGET, "$htmlget $geturl|") ||
die "Couldn't get web page for some reason";

while(<WEBGET>) {
$line = $_;
push @indoc, $line;
$line=~ s/\n//g;
$line=~ s/\r//g;
# print $line."\n";
$in=$in.$line;
}

close(WEBGET);
}
else {
while(<STDIN>) {
$line = $_;
push @indoc, $line;
$line=~ s/\n//g;
$line=~ s/\r//g;
$in=$in.$line;
}
}

@@ -193,21 +193,21 @@ sub LinkWorks {

boo:
if( $doc[0] =~ /^HTTP[^ ]+ (\d+)/ ) {
$error = $1;

if($error < 400 ) {
return "GOOD";
}
else {

if($head && ($error >= 500)) {
# This server doesn't like HEAD!
@doc = `$linkcheckfull \"$check\"`;
$head = 0;
goto boo;
}
return "BAD";
}
}
return "BAD";
}
@@ -219,43 +219,43 @@ sub GetLinks {

getlinkloop:
while($in =~ /[^<]*(<[^>]+>)/g ) {
# we have a tag in $1
$tag = $1;

if($tag =~ /^<!--/) {
# this is a comment tag, ignore it
}
else {
if($tag =~ /(src|href|background|archive) *= *(\"[^\"]\"|[^ )>]*)/i) {
$url=$2;
if($url =~ /^\"(.*)\"$/) {
# this was a "string" now $1 has removed the quotes:
$url=$1;
}


$url =~ s/([^\#]*)\#.*/$1/g;

if($url eq "") {
# if the link was nothing than a #-link it may now have
# been emptied completely so then we skip the rest
next getlinkloop;
}

if($done{$url}) {
# if this url already is done, do next
$done{$url}++;
next getlinkloop;
}

$done{$url} = 1; # this is "done"

push @result, $url;
if($tag =~ /< *([^ ]+)/) {
# print "TAG: $1\n";
$tagtype{$url}=$1;
}
}
}
}
return @result;
@@ -268,36 +268,36 @@ for(@links) {
$url = $_;

if($url =~ /^([^:]+):/) {
$prot = $1;
# if($prot !~ /(http|ftp)/i) {
if($prot !~ /http/i) {
# this is an unsupported protocol, we ignore this
next linkloop;
}
$link = $url;
}
else {
if($external) {
next linkloop;
}

# this is a link on the save server:
if($url =~ /^\//) {
# from root
$link = "$getprotocol://$getserver$url";
}
else {
# from the scanned page's dir
$nyurl=$url;

if(length($getpath) &&
($getpath !~ /\/$/) &&
($nyurl !~ /^\//)) {
# lacks ending slash, add one to the document part:
$nyurl = "/".$nyurl;
}
$link = "$getprotocol://$getserver/$getpath$nyurl";
}
}

#print "test $link\n";
@@ -313,16 +313,16 @@ for(@links) {

# If bad and -l, present the line numbers of the usage
if("BAD" eq $success) {
$badlinks++;
if($linenumber) {
$line =1;
for(@indoc) {
if($_ =~ /$url/) {
print " line $line\n";
}
$line++;
}
}
}

}
@@ -330,7 +330,7 @@ for(@links) {
if($verbose) {
print "$allcount links were checked";
if($badlinks > 0) {
print ", $badlinks were found bad";
}
print "\n";
}
@@ -125,32 +125,32 @@ my $badlinks=0;
sub SplitURL {
my $inurl = $_[0];
if($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = $3;
$getdocument = $4;
}
elsif ($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = $3;
$getdocument = "";

if($getpath !~ /\//) {
$getpath ="";
$getdocument = $3;
}

}
elsif ($inurl=~ /^([^:]+):\/\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = "";
$getdocument = "";
}
else {
print "Couldn't parse the specified URL, retry please!\n";
exit;
}
}

@@ -164,35 +164,35 @@ sub GetRootPage {

my $pagemoved=0;
open(HEADGET, "$linkcheck $geturl|") ||
die "Couldn't get web page for some reason";

while(<HEADGET>) {
#print STDERR $_;
if($_ =~ /HTTP\/1\.[01] (\d\d\d) /) {
$code=$1;
if($code =~ /^3/) {
$pagemoved=1;
}
}
elsif($_ =~ /^Content-Type: ([\/a-zA-Z]+)/) {
$type=$1;
}
elsif($pagemoved &&
($_ =~ /^Location: (.*)/)) {
$geturl = $1;

&SplitURL($geturl);

$pagemoved++;
last;
}
}
close(HEADGET);

if($pagemoved == 1) {
print "Page is moved but we don't know where. Did you forget the ",
"traling slash?\n";
exit;
}

if($type ne "text/html") {
|
|||||||
|
|
||||||
boo:
|
boo:
|
||||||
if( $doc[0] =~ /^HTTP[^ ]+ (\d+)/ ) {
|
if( $doc[0] =~ /^HTTP[^ ]+ (\d+)/ ) {
|
||||||
my $error = $1;
|
my $error = $1;
|
||||||
|
|
||||||
if($error < 400 ) {
|
if($error < 400 ) {
|
||||||
return "GOOD";
|
return "GOOD";
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
|
||||||
if($head && ($error >= 500)) {
|
if($head && ($error >= 500)) {
|
||||||
# This server doesn't like HEAD!
|
# This server doesn't like HEAD!
|
||||||
@doc = `$linkcheckfull \"$check\"`;
|
@doc = `$linkcheckfull \"$check\"`;
|
||||||
$head = 0;
|
$head = 0;
|
||||||
goto boo;
|
goto boo;
|
||||||
}
|
}
|
||||||
return "BAD";
|
return "BAD";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return "BAD";
|
return "BAD";
|
||||||
}
|
}
|
||||||
@@ -254,45 +254,45 @@ sub GetLinks {
my @result;

while($in =~ /[^<]*(<[^>]+>)/g ) {
# we have a tag in $1
my $tag = $1;

if($tag =~ /^<!--/) {
# this is a comment tag, ignore it
}
else {
if($tag =~ /(src|href|background|archive) *= *(\"[^\"]\"|[^ \)>]*)/i) {
my $url=$2;
if($url =~ /^\"(.*)\"$/) {
# this was a "string" now $1 has removed the quotes:
$url=$1;
}


$url =~ s/([^\#]*)\#.*/$1/g;

if($url eq "") {
# if the link was nothing than a #-link it may now have
# been emptied completely so then we skip the rest
next;
}

if($done{$url}) {
# if this url already is done, do next
$done{$url}++;
if($verbose) {
print " FOUND $url but that is already checked\n";
}
next;
}

$done{$url} = 1; # this is "done"

push @result, $url;
if($tag =~ /< *([^ ]+)/) {
$tagtype{$url}=$1;
}
}
}
}
return @result;
|
|||||||
if($verbose) {
|
if($verbose) {
|
||||||
print "$allcount links were checked";
|
print "$allcount links were checked";
|
||||||
if($badlinks > 0) {
|
if($badlinks > 0) {
|
||||||
print ", $badlinks were found bad";
|
print ", $badlinks were found bad";
|
||||||
}
|
}
|
||||||
print "\n";
|
print "\n";
|
||||||
}
|
}
|
||||||
|
@@ -74,7 +74,7 @@ while(<STDIN>) {
while($in =~ /[^<]*(<[^>]+>)/g ) {
# we have a tag in $1
$tag = $1;

if($tag =~ /^<!--/) {
# this is a comment tag, ignore it
}
@@ -99,7 +99,7 @@ while($in =~ /[^<]*(<[^>]+>)/g ) {

if($enctype eq "multipart/form-data") {
$enctype="multipart form upload [use -F]"
}
$enctype = "\n--- type: $enctype";
}
else {
@@ -134,7 +134,7 @@ while($in =~ /[^<]*(<[^>]+>)/g ) {
$mtag = $1;

($name, $value)=namevalue($tag);

if($mtag =~ /select/i) {
print "Select: NAME=\"$name\"\n";
push @vars, "$name";
@@ -80,32 +80,32 @@ sub SplitURL {
my $inurl = $_[0];

if($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = $3;
$getdocument = $4;
}
elsif ($inurl=~ /^([^:]+):\/\/([^\/]*)\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = $3;
$getdocument = "";

if($getpath !~ /\//) {
$getpath ="";
$getdocument = $3;
}

}
elsif ($inurl=~ /^([^:]+):\/\/(.*)/ ) {
$getprotocol = $1;
$getserver = $2;
$getpath = "";
$getdocument = "";
}
else {
print "Couldn't parse the specified URL, retry please!\n";
exit;
}
}

@@ -119,52 +119,52 @@ sub SplitURL {

if(!$usestdin) {
open(HEADGET, "$linkcheck $geturl|") ||
die "Couldn't get web page for some reason";
headget:
while(<HEADGET>) {
# print $_;
if($_ =~ /HTTP\/.*3\d\d /) {
$pagemoved=1;
}
elsif($pagemoved &&
($_ =~ /^Location: (.*)/)) {
$geturl = $1;

&SplitURL($geturl);

$pagemoved++;
last headget;
}
}
close(HEADGET);

if($pagemoved == 1) {
print "Page is moved but we don't know where. Did you forget the ",
"traling slash?\n";
exit;
}

open(WEBGET, "$htmlget $geturl|") ||
die "Couldn't get web page for some reason";

while(<WEBGET>) {
$line = $_;
push @indoc, $line;
$line=~ s/\n//g;
$line=~ s/\r//g;
# print $line."\n";
$in=$in.$line;
}

close(WEBGET);
}
else {
while(<STDIN>) {
$line = $_;
push @indoc, $line;
$line=~ s/\n//g;
$line=~ s/\r//g;
$in=$in.$line;
}
}

@@ -174,43 +174,43 @@ sub GetLinks {

getlinkloop:
while($in =~ /[^<]*(<[^>]+>)/g ) {
# we have a tag in $1
$tag = $1;

if($tag =~ /^<!--/) {
# this is a comment tag, ignore it
}
else {
if($tag =~ /(src|href|background|archive) *= *(\"[^\"]\"|[^ )>]*)/i) {
$url=$2;
if($url =~ /^\"(.*)\"$/) {
# this was a "string" now $1 has removed the quotes:
$url=$1;
}


$url =~ s/([^\#]*)\#.*/$1/g;

if($url eq "") {
# if the link was nothing than a #-link it may now have
# been emptied completely so then we skip the rest
next getlinkloop;
}

if($done{$url}) {
# if this url already is done, do next
$done{$url}++;
next getlinkloop;
}

$done{$url} = 1; # this is "done"

push @result, $url;
if($tag =~ /< *([^ ]+)/) {
# print "TAG: $1\n";
$tagtype{$url}=$1;
}
}
}
}
return @result;
@@ -223,38 +223,38 @@ for(@links) {
$url = $_;

if($url =~ /^([^:]+):/) {
$link = $url;
}
else {
# this is an absolute link on the same server:
if($url =~ /^\//) {
# from root
$link = "$getprotocol://$getserver$url";
}
else {
# from the scanned page's dir
$nyurl=$url;

if(length($getpath) &&
($getpath !~ /\/$/) &&
($nyurl !~ /^\//)) {
# lacks ending slash, add one to the document part:
$nyurl = "/".$nyurl;
}
$link = "$getprotocol://$getserver/$getpath$nyurl";
}
}

if($link =~ /$getregex/) {
if($display) {
print "$link\n";
}
else {
if($verbose) {
print "Gets $link\n";
}
print `$urlget $link`;
}
}


@@ -51,7 +51,7 @@ for(@all) {
}
print "Recursing for dir $dir$name in target $target/$name\n";

$nextdepth=$maxdepth-1;
print `$0 $dir$name/ $target/$name $nextdepth`;
}
elsif($firstletter eq "-") {
@@ -20,10 +20,10 @@ if($README eq "") {
}


-push @out, " _ _ ____ _ \n";
-push @out, " Project ___| | | | _ \\| | \n";
-push @out, " / __| | | | |_) | | \n";
-push @out, " | (__| |_| | _ <| |___ \n";
+push @out, " _ _ ____ _\n";
+push @out, " Project ___| | | | _ \\| |\n";
+push @out, " / __| | | | |_) | |\n";
+push @out, " | (__| |_| | _ <| |___\n";
push @out, " \\___|\\___/|_| \\_\\_____|\n";

my $olen=0;
@@ -35,8 +35,8 @@ while (<STDIN>) {

if($line =~ /^([ \t]*\n|curl)/i) {
# cut off headers and empty lines
$wline++; # count number of cut off lines
next;
}

my $text = $line;
@@ -57,8 +57,8 @@ while (<STDIN>) {
$olen = $tlen;

if($wline) {
# we only make one empty line max
$wline = 0;
push @out, "\n";
}
push @out, $line;
@@ -174,12 +174,12 @@ void hugehelp(void)
z.next_out = buf;
status = inflate(&z, Z_SYNC_FLUSH);
if (status == Z_OK || status == Z_STREAM_END) {
fwrite(buf, BUF_SIZE - z.avail_out, 1, stdout);
if (status == Z_STREAM_END)
break;
}
else
break; /* Error */
}
free(buf);
}
@@ -214,7 +214,7 @@ sub showdiff {
my @out = `diff -u $file2 $file1 2>/dev/null`;

if(!$out[0]) {
@out = `diff -c $file2 $file1 2>/dev/null`;
}

return @out;
@@ -3,8 +3,8 @@
# curl --version protocols/features
if ( $#ARGV != 2 )
{
print "Usage: $0 curl-config-script curl-version-output-file features|protocols\n";
exit 3;
}

my $what=$ARGV[2];
@@ -42,8 +42,8 @@ my $curlconfigproto = join ' ', @curl_config;

my $different = $curlproto ne $curlconfigproto;
if ($different) {
print "Mismatch in $what lists:\n";
print "curl: $curlproto\n";
print "curl-config: $curlconfigproto\n";
}
exit $different;
@@ -2,8 +2,8 @@
# Determine if curl-config --version matches the curl --version
if ( $#ARGV != 2 )
{
print "Usage: $0 curl-config-script curl-version-output-file version|vernum\n";
exit 3;
}

my $what=$ARGV[2];
@@ -23,23 +23,23 @@ open(CURLCONFIG, "sh $ARGV[0] --$what|") || die "Can't get curl-config --$what l
$_ = <CURLCONFIG>;
chomp;
if ( $what eq "version" ) {
/^libcurl ([\.\d]+(-CVS)?)$/ ;
$curlconfigversion = $1;
}
else {
# Convert hex version to decimal for comparison's sake
/^(..)(..)(..)$/ ;
$curlconfigversion = hex($1) . "." . hex($2) . "." . hex($3);

# Strip off the -CVS from the curl version if it's there
$version =~ s/-CVS$//;
}
close CURLCONFIG;

my $different = $version ne $curlconfigversion;
if ($different || !$version) {
print "Mismatch in --version:\n";
print "curl: $version\n";
print "curl-config: $curlconfigversion\n";
exit 1;
}
@@ -2,13 +2,13 @@
# Determine if the given curl executable supports the 'openssl' SSL engine
if ( $#ARGV != 0 )
{
print "Usage: $0 curl-executable\n";
exit 3;
}
if (!open(CURL, "@ARGV[0] -s --engine list|"))
{
print "Can't get SSL engine list\n";
exit 2;
}
while( <CURL> )
{
@@ -2,32 +2,32 @@
# Perform simple file and directory manipulation in a portable way
if ( $#ARGV <= 0 )
{
print "Usage: $0 mkdir|rmdir|rm|move|gone path1 [path2] [more commands...]\n";
exit 1;
}

use File::Copy;
while(@ARGV) {
my $cmd = shift @ARGV;
my $arg = shift @ARGV;
if ($cmd eq "mkdir") {
mkdir $arg || die "$!";
}
elsif ($cmd eq "rmdir") {
rmdir $arg || die "$!";
}
elsif ($cmd eq "rm") {
unlink $arg || die "$!";
}
elsif ($cmd eq "move") {
my $arg2 = shift @ARGV;
move($arg,$arg2) || die "$!";
}
elsif ($cmd eq "gone") {
! -e $arg || die "Path $arg exists";
} else {
print "Unsupported command $cmd\n";
exit 1;
}
}
exit 0;
@@ -4,102 +4,102 @@ use Time::Local;

if ( $#ARGV < 1 )
{
print "Usage: $0 prepare|postprocess dir [logfile]\n";
exit 1;
}

# <precheck> expects an error message on stdout
sub errout {
print $_[0] . "\n";
exit 1;
}

if ($ARGV[0] eq "prepare")
{
my $dirname = $ARGV[1];
mkdir $dirname || errout "$!";
chdir $dirname;

# Create the files in alphabetical order, to increase the chances
# of receiving a consistent set of directory contents regardless
# of whether the server alphabetizes the results or not.
mkdir "asubdir" || errout "$!";
chmod 0777, "asubdir";

open(FILE, ">plainfile.txt") || errout "$!";
binmode FILE;
print FILE "Test file to support curl test suite\n";
close(FILE);
utime time, timegm(0,0,12,1,0,100), "plainfile.txt";
chmod 0666, "plainfile.txt";

open(FILE, ">rofile.txt") || errout "$!";
binmode FILE;
print FILE "Read-only test file to support curl test suite\n";
close(FILE);
utime time, timegm(0,0,12,31,11,100), "rofile.txt";
chmod 0444, "rofile.txt";

exit 0;
}
elsif ($ARGV[0] eq "postprocess")
{
my $dirname = $ARGV[1];
my $logfile = $ARGV[2];

# Clean up the test directory
unlink "$dirname/rofile.txt";
unlink "$dirname/plainfile.txt";
rmdir "$dirname/asubdir";

rmdir $dirname || die "$!";

if ($logfile) {
# Process the directory file to remove all information that
# could be inconsistent from one test run to the next (e.g.
# file date) or may be unsupported on some platforms (e.g.
# Windows). Also, since 7.17.0, the sftp directory listing
# format can be dependent on the server (with a recent
# enough version of libssh2) so this script must also
# canonicalize the format. Here are examples of the general
# format supported:
# -r--r--r-- 12 ausername grp 47 Dec 31 2000 rofile.txt
# -r--r--r-- 1 1234 4321 47 Dec 31 2000 rofile.txt
# The "canonical" format is similar to the first (which is
# the one generated on a typical Linux installation):
# -r-?r-?r-? 12 U U 47 Dec 31 2000 rofile.txt

my @canondir;
open(IN, "<$logfile") || die "$!";
while (<IN>) {
/^(.)(..).(..).(..).\s*(\S+)\s+\S+\s+\S+\s+(\S+)\s+(\S+\s+\S+\s+\S+)(.*)$/;
if ($1 eq "d") {
# Erase all directory metadata except for the name, as it is not
# consistent for across all test systems and filesystems
push @canondir, "d????????? N U U N ??? N NN:NN$8\n";
} elsif ($1 eq "-") {
# Erase user and group names, as they are not consistent across
# all test systems
my $line = sprintf("%s%s?%s?%s?%5d U U %15d %s%s\n", $1,$2,$3,$4,$5,$6,$7,$8);
push @canondir, $line;
} else {
# Unexpected format; just pass it through and let the test fail
push @canondir, $_;
}
}
close(IN);

@canondir = sort {substr($a,57) cmp substr($b,57)} @canondir;
my $newfile = $logfile . ".new";
open(OUT, ">$newfile") || die "$!";
print OUT join('', @canondir);
close(OUT);

unlink $logfile;
rename $newfile, $logfile;
}

exit 0;
}
print "Unsupported command $ARGV[0]\n";
exit 1;
@@ -2,12 +2,12 @@
# Check that the length of a given URL is correct
if ( $#ARGV != 1 )
{
print "Usage: $0 string length\n";
exit 3;
}
if (length(@ARGV[0]) != @ARGV[1])
{
print "Given host IP and port not supported\n";
exit 1;
}
exit 0;
@@ -117,7 +117,7 @@ while(<FILE>) {
if($sizeataddr{$addr}>0) {
# this means weeeeeirdo
print "Mixed debug compile ($source:$linenum at line $lnum), rebuild curl now\n";
print "We think $sizeataddr{$addr} bytes are already allocated at that memory address: $addr!\n";
}

$sizeataddr{$addr}=$size;
@@ -749,7 +749,7 @@ sub verifyftp {
unlink($verifylog) if(-f $verifylog);

if($proto eq "ftps") {
$extra .= "--insecure --ftp-ssl-control ";
}
elsif($proto eq "smtp") {
# SMTP is a bit different since it requires more options and it