
Peteris Krumins added CURLOPT_COOKIELIST and CURLINFO_COOKIELIST, which
provide a simple interface for extracting and setting cookies in libcurl's
internal "cookie jar". See the new cookie_interface.c example code.
Daniel Stenberg, 2005-07-27 22:17:14 +00:00
commit 2236ba0d20 (parent 463c0f7096)
12 changed files with 252 additions and 23 deletions
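At the API level the addition boils down to one new setopt and one new getinfo call. A rough sketch (curl here is an assumed, already-initialized easy handle and the cookie value is made up; the complete, buildable version is the cookie_interface.c file added below):

  char cookie[] = "Set-Cookie: name=value; domain=.example.com; path=/";
  struct curl_slist *cookies = NULL;

  curl_easy_setopt(curl, CURLOPT_COOKIELIST, cookie);     /* feed a cookie in */
  curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies); /* read them all back */
  curl_slist_free_all(cookies);                           /* caller frees the list */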

View File

@@ -7,6 +7,15 @@
 Changelog

+Daniel (27 July 2005)
+- Dan Fandrich changes over the last week: fixed numerous minor configure
+  option parsing flaws: --without-gnutls, --without-spnego --without-gssapi
+  and --without-krb4. Spellfixed several error messages.
+
+- Peteris Krumins added CURLOPT_COOKIELIST and CURLINFO_COOKIELIST, which
+  provide a simple interface for extracting and setting cookies in libcurl's
+  internal "cookie jar". See the new cookie_interface.c example code.
+
 Daniel (13 July 2005)
 - Diego Casorran provided patches to make curl build fine on Amiga again.

View File

@@ -11,6 +11,7 @@ Curl and libcurl 7.14.1
 This release includes the following changes:

+ o CURLOPT_COOKIELIST and CURLINFO_COOKIELIST
 o trailer support for chunked encoded data streams
 o -x/CURL_PROXY strings may now contain user+password
 o --trace-time now outputs the full microsecond, all 6 digits

@@ -46,6 +47,6 @@ advice from friends like these:
 John McGowan, Georg Wicherski, Andres Garcia, Eric Cooper, Todd Kulesza,
 Tupone Alfredo, Gisle Vanem, David Shaw, Andrew Bushnell, Dan Fandrich,
-Adrian Schuur, Diego Casorran
+Adrian Schuur, Diego Casorran, Peteris Krumins

 Thanks! (and sorry if I forgot to mention someone)

View File

@@ -10,7 +10,8 @@ EXTRA_DIST = README curlgtk.c sepheaders.c simple.c postit2.c \
 post-callback.c multi-app.c multi-double.c multi-single.c \
 multi-post.c fopen.c simplepost.c makefile.dj curlx.c https.c \
 multi-debugcallback.c fileupload.c getinfo.c ftp3rdparty.c debug.c \
-anyauthput.c htmltitle.cc htmltidy.c opensslthreadlock.c
+anyauthput.c htmltitle.cc htmltidy.c opensslthreadlock.c \
+cookie_interface.c

 all:
 	@echo "done"

View File

@@ -0,0 +1,110 @@ (new file)
/*****************************************************************************
 *                                  _   _ ____  _
 *  Project                     ___| | | |  _ \| |
 *                             / __| | | | |_) | |
 *                            | (__| |_| |  _ <| |___
 *                             \___|\___/|_| \_\_____|
 *
 * This example shows usage of the simple cookie interface.
 */

#include <stdio.h>
#include <stdlib.h>   /* for exit() */
#include <string.h>
#include <errno.h>
#include <time.h>

#include <curl/curl.h>

static void
print_cookies(CURL *curl)
{
  CURLcode res;
  struct curl_slist *cookies;
  struct curl_slist *nc;
  int i;

  printf("Cookies, curl knows:\n");
  res = curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies);
  if (res != CURLE_OK) {
    fprintf(stderr, "Curl curl_easy_getinfo failed: %s\n",
            curl_easy_strerror(res));
    exit(1);
  }
  nc = cookies, i = 1;
  while (nc) {
    printf("[%d]: %s\n", i, nc->data);
    nc = nc->next;
    i++;
  }
  if (i == 1) {
    printf("(none)\n");
  }
  curl_slist_free_all(cookies);
}

int
main(void)
{
  CURL *curl;
  CURLcode res;

  curl_global_init(CURL_GLOBAL_ALL);
  curl = curl_easy_init();
  if (curl) {
    char nline[256];

    curl_easy_setopt(curl, CURLOPT_URL, "http://www.google.com/"); /* google.com sets "PREF" cookie */
    curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);
    curl_easy_setopt(curl, CURLOPT_COOKIEFILE, ""); /* just to start the cookie engine */

    res = curl_easy_perform(curl);
    if (res != CURLE_OK) {
      fprintf(stderr, "Curl perform failed: %s\n", curl_easy_strerror(res));
      return 1;
    }

    print_cookies(curl);

    printf("Erasing curl's knowledge of cookies!\n");
    /* the magic string "ALL" erases every cookie the handle knows */
    curl_easy_setopt(curl, CURLOPT_COOKIELIST, "ALL");

    print_cookies(curl);

    printf("-----------------------------------------------\n"
           "Setting a cookie \"PREF\" via cookie interface:\n");
#ifdef WIN32
#define snprintf _snprintf
#endif
    /* Netscape format cookie */
    snprintf(nline, 256, "%s\t%s\t%s\t%s\t%u\t%s\t%s",
             ".google.com", "TRUE", "/", "FALSE",
             (unsigned int)time(NULL) + 31337,
             "PREF", "hello google, i like you very much!");
    res = curl_easy_setopt(curl, CURLOPT_COOKIELIST, nline);
    if (res != CURLE_OK) {
      fprintf(stderr, "Curl curl_easy_setopt failed: %s\n",
              curl_easy_strerror(res));
      return 1;
    }

    /* HTTP-header style cookie */
    snprintf(nline, 256,
             "Set-Cookie: OLD_PREF=3d141414bf4209321; "
             "expires=Sun, 17-Jan-2038 19:14:07 GMT; path=/; domain=.google.com");
    res = curl_easy_setopt(curl, CURLOPT_COOKIELIST, nline);
    if (res != CURLE_OK) {
      fprintf(stderr, "Curl curl_easy_setopt failed: %s\n",
              curl_easy_strerror(res));
      return 1;
    }

    print_cookies(curl);

    res = curl_easy_perform(curl);
    if (res != CURLE_OK) {
      fprintf(stderr, "Curl perform failed: %s\n", curl_easy_strerror(res));
      return 1;
    }

    curl_easy_cleanup(curl);
  }
  else {
    fprintf(stderr, "Curl init failed!\n");
    return 1;
  }

  curl_global_cleanup();
  return 0;
}

View File

@@ -20,7 +20,8 @@ PROGRAMS = fopen.exe ftpget.exe ftpgetresp.exe ftpupload.exe \
 multi-double.exe multi-post.exe multi-single.exe \
 persistant.exe post-callback.exe postit2.exe \
 sepheaders.exe simple.exe simplessl.exe https.exe \
-ftp3rdparty.exe getinfo.exe anyauthput.exe
+ftp3rdparty.exe getinfo.exe anyauthput.exe \
+cookie_interface.exe

 all: $(PROGRAMS)

View File

@@ -134,6 +134,13 @@ counted). Combined with \fICURLINFO_REDIRECT_COUNT\fP you are able to know
 how many times libcurl successfully reused existing connection(s) or not. See
 the Connection Options of \fIcurl_easy_setopt(3)\fP to see how libcurl tries
 to make persistent connections to save time. (Added in 7.12.3)
+.IP CURLINFO_COOKIELIST
+Pass a pointer to a 'struct curl_slist *' to receive a linked-list of all
+cookies cURL knows (expired ones, too). Don't forget to
+\fIcurl_slist_free_all(3)\fP the list after it has been used.
+If there are no cookies (cookies for the handle have not been enabled or
+simply none have been received) 'struct curl_slist *' will be set to
+point to NULL.
 .SH RETURN VALUE
 If the operation was successful, CURLE_OK is returned. Otherwise an
 appropriate error code will be returned.
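For orientation, a minimal sketch of how an application would use the new info option (the helper name dump_cookies is illustrative, not part of libcurl; the complete example is the new cookie_interface.c above):

  #include <stdio.h>
  #include <curl/curl.h>

  /* Print every cookie the easy handle currently knows, one Netscape-format
     line per cookie, then release the list. */
  static void dump_cookies(CURL *curl)
  {
    struct curl_slist *cookies = NULL;
    struct curl_slist *each;

    if(curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies) != CURLE_OK)
      return;

    for(each = cookies; each; each = each->next)
      printf("%s\n", each->data);

    curl_slist_free_all(cookies); /* the application owns the returned list */
  }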

View File

@@ -654,6 +654,12 @@ cookies" from the previous session. By default, libcurl always stores and
 loads all cookies, independent if they are session cookies or not. Session
 cookies are cookies without expiry date and they are meant to be alive and
 existing for this "session" only.
+.IP CURLOPT_COOKIELIST
+Pass a char * to a cookie string. The cookie can be in either Netscape /
+Mozilla format or regular HTTP-style header (Set-Cookie: ...) format. The
+passed string will get modified, so make sure it is writable. If the cookie
+engine was not enabled yet, this option enables it. Passing the magic string
+\&"ALL" erases all cookies known by cURL.
 .IP CURLOPT_HTTPGET
 Pass a long. If the long is non-zero, this forces the HTTP request to get back
 to GET. Usable if a POST, HEAD, PUT or a custom request have been used
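A brief illustrative sketch of the option in use (the handle, domain and cookie values are made up, and seed_cookies is a hypothetical helper; note the writable char arrays, since the passed string may get modified):

  #include <curl/curl.h>

  static void seed_cookies(CURL *curl)
  {
    /* HTTP header style; this also enables the cookie engine if it was off */
    char header_line[] =
      "Set-Cookie: sessionid=abc123; path=/; domain=.example.com";

    /* Netscape/Mozilla format: domain, tailmatch, path, secure, expiry
       (unix time), name and value, separated by tabs */
    char netscape_line[] =
      ".example.com\tTRUE\t/\tFALSE\t2145916800\tlang\ten";

    curl_easy_setopt(curl, CURLOPT_COOKIELIST, header_line);
    curl_easy_setopt(curl, CURLOPT_COOKIELIST, netscape_line);

    /* and this wipes every cookie the handle has collected so far */
    curl_easy_setopt(curl, CURLOPT_COOKIELIST, "ALL");
  }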

View File

@@ -890,6 +890,9 @@ typedef enum {
      "account" info */
   CINIT(FTP_ACCOUNT, OBJECTPOINT, 134),

+  /* feed cookies into cookie engine */
+  CINIT(COOKIELIST, OBJECTPOINT, 135),
+
   CURLOPT_LASTENTRY /* the last unused */
 } CURLoption;

@@ -1244,6 +1247,7 @@ typedef enum {
   CURLINFO_OS_ERRNO         = CURLINFO_LONG   + 25,
   CURLINFO_NUM_CONNECTS     = CURLINFO_LONG   + 26,
   CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27,
+  CURLINFO_COOKIELIST       = CURLINFO_SLIST  + 28,
   /* Fill in new entries below here! */

-  CURLINFO_LASTONE          = 27
+  CURLINFO_LASTONE          = 28

View File

@@ -85,6 +85,9 @@ Example set of cookies:
 #include <stdlib.h>
 #include <string.h>

+#define _MPRINTF_REPLACE /* without this on windows OS we get undefined reference to snprintf */
+#include <curl/mprintf.h>
+
 #include "urldata.h"
 #include "cookie.h"
 #include "strequal.h"
@@ -816,6 +819,34 @@ void Curl_cookie_cleanup(struct CookieInfo *c)
   }
 }

+/* get_netscape_format()
+ *
+ * Formats a string for Netscape output file, w/o a newline at the end.
+ *
+ * Function returns a char * to a formatted line. Has to be free()d
+ */
+static char *get_netscape_format(const struct Cookie *co)
+{
+  return aprintf(
+    "%s%s\t" /* domain */
+    "%s\t"   /* tailmatch */
+    "%s\t"   /* path */
+    "%s\t"   /* secure */
+    "%u\t"   /* expires */
+    "%s\t"   /* name */
+    "%s",    /* value */
+    /* Make sure all domains are prefixed with a dot if they allow
+       tailmatching. This is Mozilla-style. */
+    (co->tailmatch && co->domain && co->domain[0] != '.')? ".":"",
+    co->domain?co->domain:"unknown",
+    co->tailmatch?"TRUE":"FALSE",
+    co->path?co->path:"/",
+    co->secure?"TRUE":"FALSE",
+    (unsigned int)co->expires,
+    co->name,
+    co->value?co->value:"");
+}
+
 /*
  * Curl_cookie_output()
  *
@@ -847,6 +878,8 @@ int Curl_cookie_output(struct CookieInfo *c, char *dumphere)
   }

   if(c) {
+    char *format_ptr;
+
     fputs("# Netscape HTTP Cookie File\n"
           "# http://www.netscape.com/newsref/std/cookie_spec.html\n"
           "# This file was generated by libcurl! Edit at your own risk.\n\n",
@@ -854,26 +887,13 @@ int Curl_cookie_output(struct CookieInfo *c, char *dumphere)
     co = c->cookies;

     while(co) {
-      fprintf(out,
-              "%s%s\t" /* domain */
-              "%s\t"   /* tailmatch */
-              "%s\t"   /* path */
-              "%s\t"   /* secure */
-              "%u\t"   /* expires */
-              "%s\t"   /* name */
-              "%s\n",  /* value */
-              /* Make sure all domains are prefixed with a dot if they allow
-                 tailmatching. This is Mozilla-style. */
-              (co->tailmatch && co->domain && co->domain[0] != '.')? ".":"",
-              co->domain?co->domain:"unknown",
-              co->tailmatch?"TRUE":"FALSE",
-              co->path?co->path:"/",
-              co->secure?"TRUE":"FALSE",
-              (unsigned int)co->expires,
-              co->name,
-              co->value?co->value:"");
+      format_ptr = get_netscape_format(co);
+      if (format_ptr == NULL) {
+        fprintf(out, "#\n# Fatal libcurl error\n");
+        return 1;
+      }
+      fprintf(out, "%s\n", format_ptr);
+      free(format_ptr);
       co=co->next;
     }
   }
@@ -884,4 +904,34 @@ int Curl_cookie_output(struct CookieInfo *c, char *dumphere)
   return 0;
 }

+struct curl_slist *Curl_cookie_list(struct SessionHandle *data)
+{
+  struct curl_slist *list = NULL;
+  struct Cookie *c;
+  char *line;
+
+  if (data->cookies == NULL)
+    return NULL;
+  if (data->cookies->numcookies == 0)
+    return NULL;
+
+  c = data->cookies->cookies;
+
+  while (c) {
+    /* fill the list with _all_ the cookies we know */
+    line = get_netscape_format(c);
+    if (line == NULL) {
+      /* get_netscape_format returns NULL only if we run out of memory */
+      curl_slist_free_all(list); /* free what we have built so far */
+      return NULL;
+    }
+    list = curl_slist_append(list, line);
+    free(line);
+    c = c->next;
+  }
+
+  return list;
+}
+
 #endif /* CURL_DISABLE_HTTP || CURL_DISABLE_COOKIES */

View File

@@ -92,4 +92,10 @@ void Curl_cookie_freelist(struct Cookie *);
 void Curl_cookie_cleanup(struct CookieInfo *);
 int Curl_cookie_output(struct CookieInfo *, char *);

+#if defined(CURL_DISABLE_HTTP) || defined(CURL_DISABLE_COOKIES)
+#define Curl_cookie_list(x) NULL
+#else
+struct curl_slist *Curl_cookie_list(struct SessionHandle *data);
+#endif
+
 #endif

View File

@@ -184,6 +184,9 @@ CURLcode Curl_getinfo(struct SessionHandle *data, CURLINFO info, ...)
   case CURLINFO_SSL_ENGINES:
     *param_slistp = Curl_ssl_engines_list(data);
     break;
+  case CURLINFO_COOKIELIST:
+    *param_slistp = Curl_cookie_list(data);
+    break;
   default:
     return CURLE_BAD_FUNCTION_ARGUMENT;
   }

View File

@@ -773,6 +773,37 @@ CURLcode Curl_setopt(struct SessionHandle *data, CURLoption option,
      */
     data->set.cookiesession = (bool)va_arg(param, long);
     break;

+  case CURLOPT_COOKIELIST:
+    argptr = va_arg(param, char *);
+
+    if (argptr == NULL)
+      break;
+
+    if (strequal(argptr, "ALL")) {
+      if (data->cookies == NULL) {
+        break;
+      }
+      else {
+        /* clear all cookies */
+        Curl_cookie_freelist(data->cookies->cookies);
+        data->cookies->cookies = NULL;
+        break;
+      }
+    }
+
+    if (!data->cookies)
+      /* if cookie engine was not running, activate it */
+      data->cookies = Curl_cookie_init(data, NULL, NULL, TRUE);
+
+    if (checkprefix("Set-Cookie:", argptr))
+      /* HTTP Header format line */
+      Curl_cookie_add(data, data->cookies, TRUE, argptr + 11, NULL, NULL);
+    else
+      /* Netscape format line */
+      Curl_cookie_add(data, data->cookies, FALSE, argptr, NULL, NULL);
+
+    break;
+
 #endif /* CURL_DISABLE_COOKIES */

   case CURLOPT_HTTPGET: