author    Daniel Stenberg <daniel@haxx.se>  2005-07-27 22:17:14 +0000
committer Daniel Stenberg <daniel@haxx.se>  2005-07-27 22:17:14 +0000
commit    2236ba0d206fe9fef5d93889ee652feaa03fe089 (patch)
tree      2651d5300c11cf99f0abd3d73b9d7ddb14efa748
parent    463c0f7096f7a0e56929a8e1b4fb3c38e164ce13 (diff)
Peteris Krumins added CURLOPT_COOKIELIST and CURLINFO_COOKIELIST, which
provide a simple interface for extracting and setting cookies in libcurl's
internal "cookie jar". See the new cookie_interface.c example code.
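
Below is a minimal sketch of how an application might drive the new interface.
Error checking is trimmed, the domain and cookie values are placeholders, and
the full cookie_interface.c added by this patch remains the authoritative
example.

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl;
  struct curl_slist *cookies = NULL;
  struct curl_slist *nc;
  /* placeholder cookie in a writable buffer, since curl_easy_setopt(3)
     warns that the passed string can get modified */
  char cookie[] = "Set-Cookie: PREF=hello; domain=.example.com; path=/";

  curl_global_init(CURL_GLOBAL_ALL);
  curl = curl_easy_init();
  if(!curl)
    return 1;

  /* feed one cookie in HTTP header format; this also starts the cookie engine */
  curl_easy_setopt(curl, CURLOPT_COOKIELIST, cookie);

  /* extract everything the handle currently knows */
  curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies);
  for(nc = cookies; nc; nc = nc->next)
    printf("%s\n", nc->data);        /* one Netscape-format line per cookie */
  curl_slist_free_all(cookies);

  /* the magic string "ALL" erases every cookie the handle knows;
     it is only compared, never modified, so a literal is fine here */
  curl_easy_setopt(curl, CURLOPT_COOKIELIST, "ALL");

  curl_easy_cleanup(curl);
  curl_global_cleanup();
  return 0;
}
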
-rw-r--r--  CHANGES                            |   9
-rw-r--r--  RELEASE-NOTES                      |   3
-rw-r--r--  docs/examples/Makefile.am          |   3
-rw-r--r--  docs/examples/cookie_interface.c   | 110
-rw-r--r--  docs/examples/makefile.dj          |   3
-rw-r--r--  docs/libcurl/curl_easy_getinfo.3   |   7
-rw-r--r--  docs/libcurl/curl_easy_setopt.3    |   6
-rw-r--r--  include/curl/curl.h                |   4
-rw-r--r--  lib/cookie.c                       |  90
-rw-r--r--  lib/cookie.h                       |   6
-rw-r--r--  lib/getinfo.c                      |   3
-rw-r--r--  lib/url.c                          |  31
12 files changed, 252 insertions(+), 23 deletions(-)
diff --git a/CHANGES b/CHANGES
index 969a44d2e..c06710cda 100644
--- a/CHANGES
+++ b/CHANGES
@@ -7,6 +7,15 @@
Changelog
+Daniel (27 July 2005)
+- Dan Fandrich's changes over the last week: fixed numerous minor configure
+ option parsing flaws: --without-gnutls, --without-spnego, --without-gssapi
+ and --without-krb4. Spellfixed several error messages.
+
+- Peteris Krumins added CURLOPT_COOKIELIST and CURLINFO_COOKIELIST, which
+ provide a simple interface for extracting and setting cookies in libcurl's
+ internal "cookie jar". See the new cookie_interface.c example code.
+
Daniel (13 July 2005)
- Diego Casorran provided patches to make curl build fine on Amiga again.
diff --git a/RELEASE-NOTES b/RELEASE-NOTES
index 100b469aa..4e119d152 100644
--- a/RELEASE-NOTES
+++ b/RELEASE-NOTES
@@ -11,6 +11,7 @@ Curl and libcurl 7.14.1
This release includes the following changes:
+ o CURLOPT_COOKIELIST and CURLINFO_COOKIELIST
o trailer support for chunked encoded data streams
o -x/CURL_PROXY strings may now contain user+password
o --trace-time now outputs the full microsecond, all 6 digits
@@ -46,6 +47,6 @@ advice from friends like these:
John McGowan, Georg Wicherski, Andres Garcia, Eric Cooper, Todd Kulesza,
Tupone Alfredo, Gisle Vanem, David Shaw, Andrew Bushnell, Dan Fandrich,
- Adrian Schuur, Diego Casorran
+ Adrian Schuur, Diego Casorran, Peteris Krumins
Thanks! (and sorry if I forgot to mention someone)
diff --git a/docs/examples/Makefile.am b/docs/examples/Makefile.am
index 7e71c2846..d2a60a0f6 100644
--- a/docs/examples/Makefile.am
+++ b/docs/examples/Makefile.am
@@ -10,7 +10,8 @@ EXTRA_DIST = README curlgtk.c sepheaders.c simple.c postit2.c \
post-callback.c multi-app.c multi-double.c multi-single.c \
multi-post.c fopen.c simplepost.c makefile.dj curlx.c https.c \
multi-debugcallback.c fileupload.c getinfo.c ftp3rdparty.c debug.c \
- anyauthput.c htmltitle.cc htmltidy.c opensslthreadlock.c
+ anyauthput.c htmltitle.cc htmltidy.c opensslthreadlock.c \
+ cookie_interface.c
all:
@echo "done"
diff --git a/docs/examples/cookie_interface.c b/docs/examples/cookie_interface.c
new file mode 100644
index 000000000..b9278dbcd
--- /dev/null
+++ b/docs/examples/cookie_interface.c
@@ -0,0 +1,110 @@
+/*****************************************************************************
+ * _ _ ____ _
+ * Project ___| | | | _ \| |
+ * / __| | | | |_) | |
+ * | (__| |_| | _ <| |___
+ * \___|\___/|_| \_\_____|
+ *
+ * This example shows usage of the simple cookie interface.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h> /* for exit() */
+#include <time.h>
+
+#include <curl/curl.h>
+
+static void
+print_cookies(CURL *curl)
+{
+ CURLcode res;
+ struct curl_slist *cookies;
+ struct curl_slist *nc;
+ int i;
+
+ printf("Cookies, curl knows:\n");
+ res = curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies);
+ if (res != CURLE_OK) {
+ fprintf(stderr, "Curl curl_easy_getinfo failed: %s\n", curl_easy_strerror(res));
+ exit(1);
+ }
+ nc = cookies, i = 1;
+ while (nc) {
+ printf("[%d]: %s\n", i, nc->data);
+ nc = nc->next;
+ i++;
+ }
+ if (i == 1) {
+ printf("(none)\n");
+ }
+ curl_slist_free_all(cookies);
+}
+
+int
+main(void)
+{
+ CURL *curl;
+ CURLcode res;
+
+ curl_global_init(CURL_GLOBAL_ALL);
+ curl = curl_easy_init();
+ if (curl) {
+ char nline[256];
+
+ curl_easy_setopt(curl, CURLOPT_URL, "http://www.google.com/"); /* google.com sets "PREF" cookie */
+ curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
+ curl_easy_setopt(curl, CURLOPT_COOKIEFILE, ""); /* just to start the cookie engine */
+ res = curl_easy_perform(curl);
+ if (res != CURLE_OK) {
+ fprintf(stderr, "Curl perform failed: %s\n", curl_easy_strerror(res));
+ return 1;
+ }
+
+ print_cookies(curl);
+
+ printf("Erasing curl's knowledge of cookies!\n");
+ curl_easy_setopt(curl, CURLOPT_COOKIELIST, "ALL"); /* "ALL" erases all cookies; NULL would be a no-op */
+
+ print_cookies(curl);
+
+ printf("-----------------------------------------------\n"
+ "Setting a cookie \"PREF\" via cookie interface:\n");
+#ifdef WIN32
+#define snprintf _snprintf
+#endif
+ /* Netscape format cookie */
+ snprintf(nline, 256, "%s\t%s\t%s\t%s\t%u\t%s\t%s",
+ ".google.com", "TRUE", "/", "FALSE", time(NULL) + 31337, "PREF", "hello google, i like you very much!");
+ res = curl_easy_setopt(curl, CURLOPT_COOKIELIST, nline);
+ if (res != CURLE_OK) {
+ fprintf(stderr, "Curl curl_easy_setopt failed: %s\n", curl_easy_strerror(res));
+ return 1;
+ }
+
+ /* HTTP-header style cookie */
+ snprintf(nline, 256,
+ "Set-Cookie: OLD_PREF=3d141414bf4209321; "
+ "expires=Sun, 17-Jan-2038 19:14:07 GMT; path=/; domain=.google.com");
+ res = curl_easy_setopt(curl, CURLOPT_COOKIELIST, nline);
+ if (res != CURLE_OK) {
+ fprintf(stderr, "Curl curl_easy_setopt failed: %s\n", curl_easy_strerror(res));
+ return 1;
+ }
+
+ print_cookies(curl);
+
+ res = curl_easy_perform(curl);
+ if (res != CURLE_OK) {
+ fprintf(stderr, "Curl perform failed: %s\n", curl_easy_strerror(res));
+ return 1;
+ }
+ }
+ else {
+ fprintf(stderr, "Curl init failed!\n");
+ return 1;
+ }
+ curl_easy_cleanup(curl);
+ curl_global_cleanup();
+ return 0;
+}
diff --git a/docs/examples/makefile.dj b/docs/examples/makefile.dj
index 35e53d381..8d19ef63e 100644
--- a/docs/examples/makefile.dj
+++ b/docs/examples/makefile.dj
@@ -20,7 +20,8 @@ PROGRAMS = fopen.exe ftpget.exe ftpgetresp.exe ftpupload.exe \
multi-double.exe multi-post.exe multi-single.exe \
persistant.exe post-callback.exe postit2.exe \
sepheaders.exe simple.exe simplessl.exe https.exe \
- ftp3rdparty.exe getinfo.exe anyauthput.exe
+ ftp3rdparty.exe getinfo.exe anyauthput.exe \
+ cookie_interface.exe
all: $(PROGRAMS)
diff --git a/docs/libcurl/curl_easy_getinfo.3 b/docs/libcurl/curl_easy_getinfo.3
index 44dc433a6..c9be5e6a0 100644
--- a/docs/libcurl/curl_easy_getinfo.3
+++ b/docs/libcurl/curl_easy_getinfo.3
@@ -134,6 +134,13 @@ counted). Combined with \fICURLINFO_REDIRECT_COUNT\fP you are able to know
how many times libcurl successfully reused existing connection(s) or not. See
the Connection Options of \fIcurl_easy_setopt(3)\fP to see how libcurl tries
to make persistent connections to save time. (Added in 7.12.3)
+.IP CURLINFO_COOKIELIST
+Pass a pointer to a 'struct curl_slist *' to receive a linked list of all
+cookies cURL knows about (expired ones, too). Don't forget to call
+\fIcurl_slist_free_all(3)\fP on the list after it has been used.
+If there are no cookies (cookies for the handle have not been enabled or
+simply none have been received), the 'struct curl_slist *' will be set to
+NULL.
.SH RETURN VALUE
If the operation was successful, CURLE_OK is returned. Otherwise an
appropriate error code will be returned.
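
As a quick companion to the CURLINFO_COOKIELIST text above, here is a hedged
sketch of the documented contract; the function name dump_cookies is only an
illustration, not part of libcurl.

#include <stdio.h>
#include <curl/curl.h>

/* Illustration only: print every cookie the handle knows about. */
void dump_cookies(CURL *curl)
{
  struct curl_slist *cookies = NULL;
  struct curl_slist *each;

  if(curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies) != CURLE_OK)
    return;

  if(!cookies) {
    /* NULL means no cookies: the engine is off or nothing has been received */
    printf("(no cookies)\n");
    return;
  }

  for(each = cookies; each; each = each->next)
    printf("%s\n", each->data);     /* one Netscape-format line per cookie */

  curl_slist_free_all(cookies);     /* the caller owns the returned list */
}
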
diff --git a/docs/libcurl/curl_easy_setopt.3 b/docs/libcurl/curl_easy_setopt.3
index 046cba0fb..75fc28926 100644
--- a/docs/libcurl/curl_easy_setopt.3
+++ b/docs/libcurl/curl_easy_setopt.3
@@ -654,6 +654,12 @@ cookies" from the previous session. By default, libcurl always stores and
loads all cookies, independent of whether they are session cookies or not.
Session cookies are cookies without an expiry date and they are meant to be
alive and existing for this "session" only.
+.IP CURLOPT_COOKIELIST
+Pass a char * to a cookie string. The cookie can be in either Netscape /
+Mozilla format or regular HTTP-style header (Set-Cookie: ...) format. The
+passed string will get modified, so make sure it is writable. If the cookie
+engine was not enabled, this option enables it. Passing the magic string
+\&"ALL" erases all cookies known by cURL.
.IP CURLOPT_HTTPGET
Pass a long. If the long is non-zero, this forces the HTTP request to get back
to GET. Usable if a POST, HEAD, PUT or a custom request has been used
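
To complement the CURLOPT_COOKIELIST text above, here is a hedged sketch of
feeding a cookie in Netscape format. The tab-separated field order (domain,
tailmatch, path, secure, expires, name, value) mirrors what lib/cookie.c
writes out; the function name and all values are placeholders.

#include <stdio.h>
#include <time.h>
#include <curl/curl.h>

/* Illustration only: add one cookie to a handle using a Netscape-format line. */
int add_netscape_cookie(CURL *curl)
{
  char line[256];   /* writable buffer, as the documentation above requires */

  snprintf(line, sizeof(line), "%s\t%s\t%s\t%s\t%u\t%s\t%s",
           ".example.com",                    /* domain (leading dot allows tailmatching) */
           "TRUE",                            /* tailmatch */
           "/",                               /* path */
           "FALSE",                           /* secure */
           (unsigned int)(time(NULL) + 3600), /* expires: one hour from now */
           "PREF",                            /* name */
           "example-value");                  /* value */

  return (curl_easy_setopt(curl, CURLOPT_COOKIELIST, line) == CURLE_OK) ? 0 : 1;
}
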
diff --git a/include/curl/curl.h b/include/curl/curl.h
index ca12b5948..46f36f921 100644
--- a/include/curl/curl.h
+++ b/include/curl/curl.h
@@ -890,6 +890,9 @@ typedef enum {
"account" info */
CINIT(FTP_ACCOUNT, OBJECTPOINT, 134),
+ /* feed cookies into cookie engine */
+ CINIT(COOKIELIST, OBJECTPOINT, 135),
+
CURLOPT_LASTENTRY /* the last unused */
} CURLoption;
@@ -1244,6 +1247,7 @@ typedef enum {
CURLINFO_OS_ERRNO = CURLINFO_LONG + 25,
CURLINFO_NUM_CONNECTS = CURLINFO_LONG + 26,
CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27,
+ CURLINFO_COOKIELIST = CURLINFO_SLIST + 28,
/* Fill in new entries below here! */
CURLINFO_LASTONE = 28
diff --git a/lib/cookie.c b/lib/cookie.c
index 019c00b71..00ea0d635 100644
--- a/lib/cookie.c
+++ b/lib/cookie.c
@@ -85,6 +85,9 @@ Example set of cookies:
#include <stdlib.h>
#include <string.h>
+#define _MPRINTF_REPLACE /* without this, on Windows we get an undefined reference to snprintf */
+#include <curl/mprintf.h>
+
#include "urldata.h"
#include "cookie.h"
#include "strequal.h"
@@ -816,6 +819,34 @@ void Curl_cookie_cleanup(struct CookieInfo *c)
}
}
+/* get_netscape_format()
+ *
+ * Formats a Netscape cookie file line, without a trailing newline.
+ *
+ * Returns an allocated char * that the caller must free(), or NULL if out of memory.
+ */
+static char *get_netscape_format(const struct Cookie *co)
+{
+ return aprintf(
+ "%s%s\t" /* domain */
+ "%s\t" /* tailmatch */
+ "%s\t" /* path */
+ "%s\t" /* secure */
+ "%u\t" /* expires */
+ "%s\t" /* name */
+ "%s", /* value */
+ /* Make sure all domains are prefixed with a dot if they allow
+ tailmatching. This is Mozilla-style. */
+ (co->tailmatch && co->domain && co->domain[0] != '.')? ".":"",
+ co->domain?co->domain:"unknown",
+ co->tailmatch?"TRUE":"FALSE",
+ co->path?co->path:"/",
+ co->secure?"TRUE":"FALSE",
+ (unsigned int)co->expires,
+ co->name,
+ co->value?co->value:"");
+}
+
/*
* Curl_cookie_output()
*
@@ -847,6 +878,8 @@ int Curl_cookie_output(struct CookieInfo *c, char *dumphere)
}
if(c) {
+ char *format_ptr;
+
fputs("# Netscape HTTP Cookie File\n"
"# http://www.netscape.com/newsref/std/cookie_spec.html\n"
"# This file was generated by libcurl! Edit at your own risk.\n\n",
@@ -854,26 +887,13 @@ int Curl_cookie_output(struct CookieInfo *c, char *dumphere)
co = c->cookies;
while(co) {
- fprintf(out,
- "%s%s\t" /* domain */
- "%s\t" /* tailmatch */
- "%s\t" /* path */
- "%s\t" /* secure */
- "%u\t" /* expires */
- "%s\t" /* name */
- "%s\n", /* value */
-
- /* Make sure all domains are prefixed with a dot if they allow
- tailmatching. This is Mozilla-style. */
- (co->tailmatch && co->domain && co->domain[0] != '.')? ".":"",
- co->domain?co->domain:"unknown",
- co->tailmatch?"TRUE":"FALSE",
- co->path?co->path:"/",
- co->secure?"TRUE":"FALSE",
- (unsigned int)co->expires,
- co->name,
- co->value?co->value:"");
-
+ format_ptr = get_netscape_format(co);
+ if (format_ptr == NULL) {
+ fprintf(out, "#\n# Fatal libcurl error\n");
+ return 1;
+ }
+ fprintf(out, "%s\n", format_ptr);
+ free(format_ptr);
co=co->next;
}
}
@@ -884,4 +904,34 @@ int Curl_cookie_output(struct CookieInfo *c, char *dumphere)
return 0;
}
+struct curl_slist *Curl_cookie_list(struct SessionHandle *data)
+{
+ struct curl_slist *list = NULL;
+ struct curl_slist *beg;
+ struct Cookie *c;
+ char *line;
+
+ if (data->cookies == NULL) return NULL;
+ if (data->cookies->numcookies == 0) return NULL;
+
+ c = data->cookies->cookies;
+
+ beg = list;
+ while (c) {
+ /* fill the list with _all_ the cookies we know */
+ line = get_netscape_format(c);
+ if (line == NULL) {
+ /* get_netscape_format returns null only if we run out of memory */
+
+ curl_slist_free_all(list); /* free the part of the list already built */
+ return NULL;
+ }
+ list = curl_slist_append(list, line);
+ free(line);
+ c = c->next;
+ }
+
+ return list;
+}
+
#endif /* CURL_DISABLE_HTTP || CURL_DISABLE_COOKIES */
diff --git a/lib/cookie.h b/lib/cookie.h
index 6f8e8e5fc..aed9f73f7 100644
--- a/lib/cookie.h
+++ b/lib/cookie.h
@@ -92,4 +92,10 @@ void Curl_cookie_freelist(struct Cookie *);
void Curl_cookie_cleanup(struct CookieInfo *);
int Curl_cookie_output(struct CookieInfo *, char *);
+#if defined(CURL_DISABLE_HTTP) || defined(CURL_DISABLE_COOKIES)
+#define Curl_cookie_list(x) NULL
+#else
+struct curl_slist *Curl_cookie_list(struct SessionHandle *data);
+#endif
+
#endif
diff --git a/lib/getinfo.c b/lib/getinfo.c
index 77945bbfc..47828212b 100644
--- a/lib/getinfo.c
+++ b/lib/getinfo.c
@@ -184,6 +184,9 @@ CURLcode Curl_getinfo(struct SessionHandle *data, CURLINFO info, ...)
case CURLINFO_SSL_ENGINES:
*param_slistp = Curl_ssl_engines_list(data);
break;
+ case CURLINFO_COOKIELIST:
+ *param_slistp = Curl_cookie_list(data);
+ break;
default:
return CURLE_BAD_FUNCTION_ARGUMENT;
}
diff --git a/lib/url.c b/lib/url.c
index 98662ceb9..07a34a94e 100644
--- a/lib/url.c
+++ b/lib/url.c
@@ -773,6 +773,37 @@ CURLcode Curl_setopt(struct SessionHandle *data, CURLoption option,
*/
data->set.cookiesession = (bool)va_arg(param, long);
break;
+
+ case CURLOPT_COOKIELIST:
+ argptr = va_arg(param, char *);
+
+ if (argptr == NULL)
+ break;
+
+ if (strequal(argptr, "ALL")) {
+ if (data->cookies == NULL) {
+ break;
+ }
+ else {
+ /* clear all cookies */
+ Curl_cookie_freelist(data->cookies->cookies);
+ data->cookies->cookies = NULL;
+ break;
+ }
+ }
+
+ if (!data->cookies)
+ /* if cookie engine was not running, activate it */
+ data->cookies = Curl_cookie_init(data, NULL, NULL, TRUE);
+
+ if (checkprefix("Set-Cookie:", argptr))
+ /* HTTP Header format line */
+ Curl_cookie_add(data, data->cookies, TRUE, argptr + 11, NULL, NULL);
+
+ else
+ /* Netscape format line */
+ Curl_cookie_add(data, data->cookies, FALSE, argptr, NULL, NULL);
+ break;
#endif /* CURL_DISABLE_COOKIES */
case CURLOPT_HTTPGET: