author | Daniel Stenberg <daniel@haxx.se> | 2000-11-20 08:53:21 +0000 |
---|---|---|
committer | Daniel Stenberg <daniel@haxx.se> | 2000-11-20 08:53:21 +0000 |
commit | 42280e95bf159c4db89e3d9ea3d2e77f32cf800f (patch) | |
tree | 9bb798409f402e6a01eab36b5de1bdf870431892 | |
parent | b2ad1f68ccc066799685a86886c7df71b7ea1489 (diff) | |
removed URL size restrictions
-rw-r--r-- | lib/dict.c | 2 |
-rw-r--r-- | lib/http.c | 2 |
-rw-r--r-- | lib/url.c | 24 |
-rw-r--r-- | lib/urldata.h | 2 |
4 files changed, 24 insertions, 6 deletions
diff --git a/lib/dict.c b/lib/dict.c
index b592266cd..a012dc1cf 100644
--- a/lib/dict.c
+++ b/lib/dict.c
@@ -233,7 +233,7 @@ CURLcode dict(struct connectdata *conn)
       int i;
 
       ppath++;
-      for (i = 0; (i < URL_MAX_LENGTH) && (ppath[i]); i++) {
+      for (i = 0; ppath[i]; i++) {
         if (ppath[i] == ':')
           ppath[i] = ' ';
       }
diff --git a/lib/http.c b/lib/http.c
index d5241acec..2ff643248 100644
--- a/lib/http.c
+++ b/lib/http.c
@@ -327,7 +327,7 @@ CURLcode http(struct connectdata *conn)
   }
   if ((data->bits.httpproxy) && !(conn->protocol&PROT_HTTPS)) {
     /* The path sent to the proxy is in fact the entire URL */
-    strncpy(ppath, data->url, URL_MAX_LENGTH-1);
+    ppath = data->url;
   }
   if(data->bits.http_formpost) {
     /* we must build the whole darned post sequence first, so that we have
diff --git a/lib/url.c b/lib/url.c
--- a/lib/url.c
+++ b/lib/url.c
@@ -677,6 +677,9 @@ CURLcode curl_disconnect(CURLconnect *c_connect)
   if(conn->hostent_buf) /* host name info */
     free(conn->hostent_buf);
 
+  if(conn->path) /* the URL path part */
+    free(conn->path);
+
   free(conn); /* free the connection oriented data */
 
   /* clean up the sockets and SSL stuff from the previous "round" */
@@ -696,6 +699,7 @@ static CURLcode _connect(CURL *curl, CURLconnect **in_connect)
 #ifdef HAVE_SIGACTION
   struct sigaction sigact;
 #endif
+  int urllen;
 
   if(!data || (data->handle != STRUCT_OPEN))
     return CURLE_BAD_FUNCTION_ARGUMENT; /* TBD: make error codes */
@@ -734,13 +738,25 @@ static CURLcode _connect(CURL *curl, CURLconnect **in_connect)
 #endif
 
+  /* We need to allocate memory to store the path in. We get the size of the
+     full URL to be sure, and we need to make it at least 256 bytes since
+     other parts of the code will rely on this fact */
+#define LEAST_PATH_ALLOC 256
+  urllen=strlen(data->url);
+  if(urllen < LEAST_PATH_ALLOC)
+    urllen=LEAST_PATH_ALLOC;
+
+  conn->path=(char *)malloc(urllen);
+  if(NULL == conn->path)
+    return CURLE_OUT_OF_MEMORY; /* really bad error */
+
   /* Parse <url> */
   /* We need to parse the url, even when using the proxy, because
    * we will need the hostname and port in case we are trying
    * to SSL connect through the proxy -- and we don't know if we
    * will need to use SSL until we parse the url ... */
-  if((2 == sscanf(data->url, "%64[^:]://%" URL_MAX_LENGTH_TXT "[^\n]",
+  if((2 == sscanf(data->url, "%64[^:]://%[^\n]",
                   conn->proto, conn->path)) && strequal(conn->proto, "file")) {
     /* we deal with file://<host>/<path> differently since it
@@ -760,11 +776,11 @@ static CURLcode _connect(CURL *curl, CURLconnect **in_connect)
     strcpy(conn->path, "/");
 
     if (2 > sscanf(data->url,
-                   "%64[^\n:]://%256[^\n/]%" URL_MAX_LENGTH_TXT "[^\n]",
+                   "%64[^\n:]://%256[^\n/]%[^\n]",
                    conn->proto, conn->gname, conn->path)) {
 
       /* badly formatted, let's try the browser-style _without_ 'http://' */
-      if((1 > sscanf(data->url, "%256[^\n/]%" URL_MAX_LENGTH_TXT "[^\n]",
+      if((1 > sscanf(data->url, "%256[^\n/]%[^\n]",
                      conn->gname, conn->path)) ) {
         failf(data, "<url> malformed");
         return CURLE_URL_MALFORMAT;
@@ -1548,6 +1564,8 @@ CURLcode curl_connect(CURL *curl, CURLconnect **in_connect)
        in the connectdata struct, free those here */
     conn = (struct connectdata *)*in_connect;
     if(conn) {
+      if(conn->path)
+        free(conn->path);
       if(conn->hostent_buf)
         free(conn->hostent_buf);
       free(conn);
diff --git a/lib/urldata.h b/lib/urldata.h
index bda19f73b..de16ea17d 100644
--- a/lib/urldata.h
+++ b/lib/urldata.h
@@ -175,7 +175,7 @@ struct connectdata {
   char proto[64];
   char gname[256];
   char *name;
-  char path[URL_MAX_LENGTH];
+  char *path; /* formerly staticly this size: URL_MAX_LENGTH */
   char *ppath;
   long bytecount;
   struct timeval now;
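For context, the heart of the change is that conn->path is now sized from the actual URL rather than declared as a fixed URL_MAX_LENGTH array. Below is a minimal sketch of that allocation strategy, assuming a hypothetical helper name; alloc_path_buffer() does not exist in libcurl, and the real commit does this inline in _connect() in lib/url.c.

```c
#include <stdlib.h>
#include <string.h>

/* The patch keeps a 256-byte floor on the allocation because other parts
   of the code rely on at least that much room. */
#define LEAST_PATH_ALLOC 256

/* Hypothetical helper (illustration only): return a buffer large enough
   to hold any path parsed out of 'url', or NULL on allocation failure
   (which the patch maps to CURLE_OUT_OF_MEMORY). */
char *alloc_path_buffer(const char *url)
{
  size_t urllen = strlen(url);

  if(urllen < LEAST_PATH_ALLOC)
    urllen = LEAST_PATH_ALLOC;

  return malloc(urllen);
}
```

The full URL length is a safe upper bound for the path portion, which is why the sscanf() calls can drop their URL_MAX_LENGTH_TXT width limits once the buffer is sized this way.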