Diffstat (limited to 'docs')
-rw-r--r--  docs/examples/10-at-a-time.c  97
1 file changed, 25 insertions, 72 deletions
diff --git a/docs/examples/10-at-a-time.c b/docs/examples/10-at-a-time.c
index 638f425f2..2b7497616 100644
--- a/docs/examples/10-at-a-time.c
+++ b/docs/examples/10-at-a-time.c
@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
- * Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
+ * Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -20,10 +20,8 @@
*
***************************************************************************/
/* <DESC>
- * Source code using the multi interface to download many
- * files, with a capped maximum amount of simultaneous transfers.
+ * Download many files in parallel, in the same thread.
* </DESC>
- * Written by Michael Wallner
*/
#include <errno.h>
@@ -84,27 +82,23 @@ static const char *urls[] = {
"https://www.un.org",
};
-#define MAX 10 /* number of simultaneous transfers */
-#define CNT sizeof(urls)/sizeof(char *) /* total number of transfers to do */
+#define MAX_PARALLEL 10 /* number of simultaneous transfers */
+#define NUM_URLS sizeof(urls)/sizeof(char *) /* total number of transfers to do */
-static size_t cb(char *d, size_t n, size_t l, void *p)
+static size_t write_cb(char *data, size_t n, size_t l, void *userp)
{
/* take care of the data here, ignored in this example */
- (void)d;
- (void)p;
+ (void)data;
+ (void)userp;
return n*l;
}
-static void init(CURLM *cm, int i)
+static void add_transfer(CURLM *cm, int i)
{
CURL *eh = curl_easy_init();
-
- curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, cb);
- curl_easy_setopt(eh, CURLOPT_HEADER, 0L);
+ curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_cb);
curl_easy_setopt(eh, CURLOPT_URL, urls[i]);
curl_easy_setopt(eh, CURLOPT_PRIVATE, urls[i]);
- curl_easy_setopt(eh, CURLOPT_VERBOSE, 0L);
-
curl_multi_add_handle(cm, eh);
}
@@ -112,64 +106,23 @@ int main(void)
{
CURLM *cm;
CURLMsg *msg;
- long L;
- unsigned int C = 0;
- int M, Q, U = -1;
- fd_set R, W, E;
- struct timeval T;
+ unsigned int transfers = 0;
+ int msgs_left = -1;
+ int still_alive = 1;
curl_global_init(CURL_GLOBAL_ALL);
-
cm = curl_multi_init();
- /* we can optionally limit the total amount of connections this multi handle
- uses */
- curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX);
+ /* Limit the amount of simultaneous connections curl should allow: */
+ curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX_PARALLEL);
- for(C = 0; C < MAX; ++C) {
- init(cm, C);
- }
+ for(transfers = 0; transfers < MAX_PARALLEL; transfers++)
+ add_transfer(cm, transfers);
- while(U) {
- curl_multi_perform(cm, &U);
+ do {
+ curl_multi_perform(cm, &still_alive);
- if(U) {
- FD_ZERO(&R);
- FD_ZERO(&W);
- FD_ZERO(&E);
-
- if(curl_multi_fdset(cm, &R, &W, &E, &M)) {
- fprintf(stderr, "E: curl_multi_fdset\n");
- return EXIT_FAILURE;
- }
-
- if(curl_multi_timeout(cm, &L)) {
- fprintf(stderr, "E: curl_multi_timeout\n");
- return EXIT_FAILURE;
- }
- if(L == -1)
- L = 100;
-
- if(M == -1) {
-#ifdef WIN32
- Sleep(L);
-#else
- sleep((unsigned int)L / 1000);
-#endif
- }
- else {
- T.tv_sec = L/1000;
- T.tv_usec = (L%1000)*1000;
-
- if(0 > select(M + 1, &R, &W, &E, &T)) {
- fprintf(stderr, "E: select(%i,,,,%li): %i: %s\n",
- M + 1, L, errno, strerror(errno));
- return EXIT_FAILURE;
- }
- }
- }
-
- while((msg = curl_multi_info_read(cm, &Q))) {
+ while((msg = curl_multi_info_read(cm, &msgs_left))) {
if(msg->msg == CURLMSG_DONE) {
char *url;
CURL *e = msg->easy_handle;
@@ -182,13 +135,13 @@ int main(void)
else {
fprintf(stderr, "E: CURLMsg (%d)\n", msg->msg);
}
- if(C < CNT) {
- init(cm, C++);
- U++; /* just to prevent it from remaining at 0 if there are more
- URLs to get */
- }
+ if(transfers < NUM_URLS)
+ add_transfer(cm, transfers++);
}
- }
+ if(still_alive)
+ curl_multi_wait(cm, NULL, 0, 1000, NULL);
+
+ } while(still_alive || (transfers < NUM_URLS));
curl_multi_cleanup(cm);
curl_global_cleanup();
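
For readers who want to try the pattern without applying the patch, below is a minimal, self-contained sketch of the loop this change adopts: curl_multi_perform() drives the transfers, curl_multi_wait() blocks until there is activity or a one-second timeout, and curl_multi_info_read() harvests finished handles so new transfers can be added. The URL list is a placeholder and most error checking is trimmed; build with something like `cc sketch.c -lcurl`.

/* sketch of the curl_multi_wait()-based loop from the updated example;
   placeholder URLs, error checks omitted for brevity */
#include <stdio.h>
#include <curl/curl.h>

static const char *urls[] = {
  "https://curl.haxx.se",
  "https://www.example.com",
};
#define NUM_URLS     (sizeof(urls)/sizeof(char *))
#define MAX_PARALLEL 2   /* cap on simultaneous transfers */

/* discard the downloaded data; a real program would store or parse it */
static size_t write_cb(char *data, size_t n, size_t l, void *userp)
{
  (void)data;
  (void)userp;
  return n * l;
}

static void add_transfer(CURLM *cm, unsigned int i)
{
  CURL *eh = curl_easy_init();
  curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_cb);
  curl_easy_setopt(eh, CURLOPT_URL, urls[i]);
  curl_easy_setopt(eh, CURLOPT_PRIVATE, urls[i]); /* remember the URL */
  curl_multi_add_handle(cm, eh);
}

int main(void)
{
  CURLM *cm;
  CURLMsg *msg;
  unsigned int transfers = 0;
  int msgs_left = -1;
  int still_alive = 1;

  curl_global_init(CURL_GLOBAL_ALL);
  cm = curl_multi_init();
  curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX_PARALLEL);

  /* prime the multi handle with the first batch of transfers */
  for(transfers = 0; transfers < MAX_PARALLEL && transfers < NUM_URLS;
      transfers++)
    add_transfer(cm, transfers);

  do {
    curl_multi_perform(cm, &still_alive);

    /* reap finished transfers and start new ones to keep the pipe full */
    while((msg = curl_multi_info_read(cm, &msgs_left))) {
      if(msg->msg == CURLMSG_DONE) {
        char *url;
        CURL *e = msg->easy_handle;
        curl_easy_getinfo(e, CURLINFO_PRIVATE, &url);
        fprintf(stderr, "R: %d - %s <%s>\n",
                msg->data.result, curl_easy_strerror(msg->data.result), url);
        curl_multi_remove_handle(cm, e);
        curl_easy_cleanup(e);
      }
      if(transfers < NUM_URLS)
        add_transfer(cm, transfers++);
    }

    /* wait for activity on any transfer, or at most one second */
    if(still_alive)
      curl_multi_wait(cm, NULL, 0, 1000, NULL);

  } while(still_alive || (transfers < NUM_URLS));

  curl_multi_cleanup(cm);
  curl_global_cleanup();
  return 0;
}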