about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/Makefile.am    |    2
-rw-r--r--  src/config-win32.h |    3
-rw-r--r--  src/hugehelp.c     | 1290
-rw-r--r--  src/main.c         |  253
-rw-r--r--  src/urlglob.c      |   38
-rw-r--r--  src/version.h      |    2
6 files changed, 840 insertions(+), 748 deletions(-)
diff --git a/src/Makefile.am b/src/Makefile.am
index 561142ed3..21a799acd 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -4,7 +4,7 @@
# Some flags needed when trying to cause warnings ;-)
# CFLAGS = -Wall -pedantic
-CPPFLAGS = -DGLOBURL -DCURL_SEPARATORS
+#CPPFLAGS = -DGLOBURL -DCURL_SEPARATORS
INCLUDES = -I$(top_srcdir)/include
diff --git a/src/config-win32.h b/src/config-win32.h
index ba60773eb..55d3e3a5f 100644
--- a/src/config-win32.h
+++ b/src/config-win32.h
@@ -2,6 +2,9 @@
/* Define if you have the strcasecmp function. */
/*#define HAVE_STRCASECMP 1*/
+/* Define if you have the stricmp function. */
+#define HAVE_STRICMP 1
+
/* Define cpu-machine-OS */
#define OS "win32"
diff --git a/src/hugehelp.c b/src/hugehelp.c
index dde24867a..316d3e39d 100644
--- a/src/hugehelp.c
+++ b/src/hugehelp.c
@@ -9,725 +9,694 @@ puts (
" | (__| |_| | _ <| |___ \n"
" \\___|\\___/|_| \\_\\_____|\n"
"NAME\n"
-" curl - get a URL with FTP, TELNET, LDAP, GOPHER, DICT,\n"
-" FILE, HTTP or HTTPS syntax.\n"
+" curl - get a URL with FTP, TELNET, LDAP, GOPHER, DICT, FILE,\n"
+" HTTP or HTTPS syntax.\n"
"\n"
"SYNOPSIS\n"
-" curl [options] url\n"
+" curl [options] url\n"
"\n"
"DESCRIPTION\n"
-" curl is a client to get documents/files from servers,\n"
-" using any of the supported protocols. The command is\n"
-" designed to work without user interaction or any kind of\n"
-" interactivity.\n"
+" curl is a client to get documents/files from servers, using\n"
+" any of the supported protocols. The command is designed to\n"
+" work without user interaction or any kind of interactivity.\n"
"\n"
-" curl offers a busload of useful tricks like proxy support,\n"
-" user authentication, ftp upload, HTTP post, SSL (https:)\n"
-" connections, cookies, file transfer resume and more.\n"
+" curl offers a busload of useful tricks like proxy support,\n"
+" user authentication, ftp upload, HTTP post, SSL (https:)\n"
+" connections, cookies, file transfer resume and more.\n"
"\n"
"URL\n"
-" The URL syntax is protocol dependent. You'll find a\n"
-" detailed description in RFC 2396.\n"
+" The URL syntax is protocol dependent. You'll find a detailed\n"
+" description in RFC 2396.\n"
"\n"
-" You can specify multiple URLs or parts of URLs by writing\n"
-" part sets within braces as in:\n"
+" You can specify multiple URLs or parts of URLs by writing\n"
+" part sets within braces as in:\n"
"\n"
-" http://site.{one,two,three}.com\n"
+" http://site.{one,two,three}.com\n"
"\n"
-" or you can get sequences of alphanumeric series by using\n"
-" [] as in:\n"
+" or you can get sequences of alphanumeric series by using []\n"
+" as in:\n"
"\n"
-" ftp://ftp.numericals.com/file[1-100].txt\n"
-" ftp://ftp.numericals.com/file[001-100].txt (with lead-\n"
-" ing zeros)\n"
-" ftp://ftp.letters.com/file[a-z].txt\n"
+" ftp://ftp.numericals.com/file[1-100].txt\n"
+" ftp://ftp.numericals.com/file[001-100].txt (with leading\n"
+" zeros)\n"
+" ftp://ftp.letters.com/file[a-z].txt\n"
"\n"
-" It is possible to specify up to 9 sets or series for a\n"
-" URL, but no nesting is supported at the moment:\n"
+" It is possible to specify up to 9 sets or series for a URL,\n"
+" but no nesting is supported at the moment:\n"
"\n"
-" http://www.any.org/archive[1996-1999]/vol-\n"
-" ume[1-4]part{a,b,c,index}.html\n"
+" http://www.any.org/archive[1996-1999]/vol­\n"
+" ume[1-4]part{a,b,c,index}.html\n"
"\n"
"OPTIONS\n"
-" -a/--append\n"
-" (FTP) When used in a ftp upload, this will tell\n"
-" curl to append to the target file instead of over-\n"
-" writing it. If the file doesn't exist, it will be\n"
-" created.\n"
-"\n"
-" -A/--user-agent <agent string>\n"
-" (HTTP) Specify the User-Agent string to send to the\n"
-" HTTP server. Some badly done CGIs fail if its not\n"
-" set to \"Mozilla/4.0\". To encode blanks in the\n"
-" string, surround the string with single quote\n"
-" marks. This can also be set with the -H/--header\n"
-" flag of course.\n"
-" -b/--cookie <name=data>\n"
-" (HTTP) Pass the data to the HTTP server as a\n"
-" cookie. It is supposedly the data previously\n"
-" received from the server in a \"Set-Cookie:\" line.\n"
-" The data should be in the format \"NAME1=VALUE1;\n"
-" NAME2=VALUE2\".\n"
-"\n"
-" If no '=' letter is used in the line, it is treated\n"
-" as a filename to use to read previously stored\n"
-" cookie lines from, which should be used in this\n"
-" session if they match. Using this method also acti-\n"
-" vates the \"cookie parser\" which will make curl\n"
-" record incoming cookies too, which may be handy if\n"
-" you're using this in combination with the\n"
-" -L/--location option. The file format of the file\n"
-" to read cookies from should be plain HTTP headers\n"
-" or the netscape cookie file format.\n"
-"\n"
-" NOTE that the file specified with -b/--cookie is\n"
-" only used as input. No cookies will be stored in\n"
-" the file. To store cookies, save the HTTP headers\n"
-" to a file using -D/--dump-header!\n"
-"\n"
-" -B/--ftp-ascii\n"
-" (FTP/LDAP) Use ASCII transfer when getting an FTP\n"
-" file or LDAP info. For FTP, this can also be\n"
-" enforced by using an URL that ends with \";type=A\".\n"
-"\n"
-" -c/--continue\n"
-" Continue/Resume a previous file transfer. This\n"
-" instructs curl to continue appending data on the\n"
-" file where it was previously left, possibly because\n"
-" of a broken connection to the server. There must be\n"
-" a named physical file to append to for this to\n"
-" work. Note: Upload resume is depening on a command\n"
-" named SIZE not always present in all ftp servers!\n"
-" Upload resume is for FTP only. HTTP resume is only\n"
-" possible with HTTP/1.1 or later servers.\n"
-"\n"
-" -C/--continue-at <offset>\n"
-" Continue/Resume a previous file transfer at the\n"
-" given offset. The given offset is the exact number\n"
-" of bytes that will be skipped counted from the\n"
-" beginning of the source file before it is trans-\n"
-" fered to the destination. If used with uploads,\n"
-" the ftp server command SIZE will not be used by\n"
-" curl. Upload resume is for FTP only. HTTP resume\n"
-" is only possible with HTTP/1.1 or later servers.\n"
-"\n"
-" -d/--data <data>\n"
-" (HTTP) Sends the specified data in a POST request\n"
-" to the HTTP server. Note that the data is sent\n"
-" exactly as specified with no extra processing. The\n"
-" data is expected to be \"url-encoded\". This will\n"
-" cause curl to pass the data to the server using the\n"
-" content-type application/x-www-form-urlencoded.\n"
-" Compare to -F.\n"
-"\n"
-" If you start the data with the letter @, the rest\n"
-" should be a file name to read the data from, or -\n"
-" if you want curl to read the data from stdin. The\n"
-" contents of the file must already be url-encoded.\n"
-"\n"
-" -D/--dump-header <file>\n"
-" (HTTP/FTP) Write the HTTP headers to this file.\n"
-" Write the FTP file info to this file if -I/--head\n"
-" is used.\n"
-"\n"
-" This option is handy to use when you want to store\n"
-" the cookies that a HTTP site sends to you. The\n"
-" cookies could then be read in a second curl invoke\n"
-" by using the -b/--cookie option!\n"
-"\n"
-" -e/--referer <URL>\n"
-" (HTTP) Sends the \"Referer Page\" information to the\n"
-" HTTP server. Some badly done CGIs fail if it's not\n"
-" set. This can also be set with the -H/--header flag\n"
-" of course.\n"
-"\n"
-" -E/--cert <certificate[:password]>\n"
-" (HTTPS) Tells curl to use the specified certificate\n"
-" file when getting a file with HTTPS. The certifi-\n"
-" cate must be in PEM format. If the optional pass-\n"
-" word isn't specified, it will be queried for on the\n"
-" terminal. Note that this certificate is the private\n"
-" key and the private certificate concatenated!\n"
-"\n"
-" -f/--fail\n"
-" (HTTP) Fail silently (no output at all) on server\n"
-" errors. This is mostly done like this to better\n"
-" enable scripts etc to better deal with failed\n"
-" attempts. In normal cases when a HTTP server fails\n"
-" to deliver a document, it returns a HTML document\n"
-" stating so (which often also describes why and\n"
-" more). This flag will prevent curl from outputting\n"
-" that and fail silently instead.\n"
-"\n"
-" -F/--form <name=content>\n"
-" (HTTP) This lets curl emulate a filled in form in\n"
-" which a user has pressed the submit button. This\n"
-" causes curl to POST data using the content-type\n"
-" multipart/form-data according to RFC1867. This\n"
-" enables uploading of binary files etc. To force the\n"
-" 'content' part to be read from a file, prefix the\n"
-" file name with an @ sign. Example, to send your\n"
-" password file to the server, where 'password' is\n"
-" the name of the form-field to which /etc/passwd\n"
-" will be the input:\n"
-" curl -F password=@/etc/passwd www.mypasswords.com\n"
-"\n"
-" To read the file's content from stdin insted of a\n"
-" file, use - where the file name should've been.\n"
-"\n"
-" -h/--help\n"
-" Usage help.\n"
-"\n"
-" -H/--header <header>\n"
-" (HTTP) Extra header to use when getting a web page.\n"
-" You may specify any number of extra headers. Note\n"
-" that if you should add a custom header that has the\n"
-" same name as one of the internal ones curl would\n"
-" use, your externally set header will be used\n"
-" instead of the internal one. This allows you to\n"
-" make even trickier stuff than curl would normally\n"
-" do. You should not replace internally set headers\n"
-" without knowing perfectly well what you're doing.\n"
-"\n"
-" -i/--include\n"
-" (HTTP) Include the HTTP-header in the output. The\n"
-" HTTP-header includes things like server-name, date\n"
-" of the document, HTTP-version and more...\n"
-"\n"
-" -I/--head\n"
-" (HTTP/FTP) Fetch the HTTP-header only! HTTP-servers\n"
-" feature the command HEAD which this uses to get\n"
-" nothing but the header of a document. When used on\n"
-" a FTP file, curl displays the file size only.\n"
-"\n"
-" -K/--config <config file>\n"
-" Specify which config file to read curl arguments\n"
-" from. The config file is a text file in which com-\n"
-" mand line arguments can be written which then will\n"
-" be used as if they were written on the actual com-\n"
-" mand line. If the first column of a config line is\n"
-" a '#' character, the rest of the line will be\n"
-" treated as a comment.\n"
-"\n"
-" Specify the filename as '-' to make curl read the\n"
-" file from stdin.\n"
-"\n"
-" -l/--list-only\n"
-" (FTP) When listing an FTP directory, this switch\n"
-" forces a name-only view. Especially useful if you\n"
-" want to machine-parse the contents of an FTP direc-\n"
-" tory since the normal directory view doesn't use a\n"
-" standard look or format.\n"
-"\n"
-" -L/--location\n"
-" (HTTP/HTTPS) If the server reports that the\n"
-" requested page has a different location (indicated\n"
-" with the header line Location:) this flag will let\n"
-" curl attempt to reattempt the get on the new place.\n"
-" If used together with -i or -I, headers from all\n"
-" requested pages will be shown.\n"
-"\n"
-" -m/--max-time <seconds>\n"
-" Maximum time in seconds that you allow the whole\n"
-" operation to take. This is useful for preventing\n"
-" your batch jobs from hanging for hours due to slow\n"
-" networks or links going down. This doesn't work\n"
-" properly in win32 systems.\n"
-"\n"
-" -M/--manual\n"
-" Manual. Display the huge help text.\n"
-"\n"
-" -n/--netrc\n"
-" Makes curl scan the .netrc file in the user's home\n"
-" directory for login name and password. This is typ-\n"
-" ically used for ftp on unix. If used with http,\n"
-" curl will enable user authentication. See netrc(5)\n"
-" for details on the file format. Curl will not com-\n"
-" plain if that file hasn't the right permissions (it\n"
-" should not be world nor group readable). The envi-\n"
-" ronment variable \"HOME\" is used to find the home\n"
-" directory.\n"
-"\n"
-" A quick and very simple example of how to setup a\n"
-" .netrc to allow curl to ftp to the machine\n"
-" host.domain.com with user name\n"
-"\n"
-" machine host.domain.com user myself password secret\n"
-"\n"
-" -N/--no-buffer\n"
-" Disables the buffering of the output stream. In\n"
-" normal work situations, curl will use a standard\n"
-" buffered output stream that will have the effect\n"
-" that it will output the data in chunks, not neces-\n"
-" sarily exactly when the data arrives. Using this\n"
-" option will disable that buffering.\n"
-"\n"
-" -o/--output <file>\n"
-" Write output to <file> instead of stdout. If you\n"
-" are using {} or [] to fetch multiple documents, you\n"
-" can use '#' followed by a number in the <file>\n"
-" specifier. That variable will be replaced with the\n"
-" current string for the URL being fetched. Like in:\n"
-"\n"
-" curl http://{one,two}.site.com -o \"file_#1.txt\"\n"
-"\n"
-" or use several variables like:\n"
-"\n"
-" curl http://{site,host}.host[1-5].com -o \"#1_#2\"\n"
-"\n"
-" -O/--remote-name\n"
-" Write output to a local file named like the remote\n"
-" file we get. (Only the file part of the remote file\n"
-" is used, the path is cut off.)\n"
-"\n"
-" -P/--ftpport <address>\n"
-" (FTP) Reverses the initiator/listener roles when\n"
-" connecting with ftp. This switch makes Curl use the\n"
-" PORT command instead of PASV. In practice, PORT\n"
-" tells the server to connect to the client's speci-\n"
-" fied address and port, while PASV asks the server\n"
-" for an ip address and port to connect to. <address>\n"
-" should be one of:\n"
-"\n"
-" interface i.e \"eth0\" to specify which interface's\n"
-" IP address you want to use (Unix only)\n"
-"\n"
-" IP address i.e \"192.168.10.1\" to specify exact IP\n"
-" number\n"
-"\n"
-" host name i.e \"my.host.domain\" to specify machine\n"
-"\n"
-" - (any single-letter string) to make it\n"
-" pick the machine's default\n"
-"\n"
-" -q If used as the first parameter on the command line,\n"
-" the $HOME/.curlrc file will not be read and used as\n"
-" a config file.\n"
-"\n"
-" -Q/--quote <comand>\n"
-" (FTP) Send an arbitrary command to the remote FTP\n"
-" server, by using the QUOTE command of the server.\n"
-" Not all servers support this command, and the set\n"
-" of QUOTE commands are server specific! Quote com-\n"
-" mands are sent BEFORE the transfer is taking place.\n"
-" To make commands take place after a successful\n"
-" transfer, prefix them with a dash '-'. You may\n"
-" specify any amount of commands to be run before and\n"
-" after the transfer. If the server returns failure\n"
-" for one of the commands, the entire operation will\n"
-" be aborted.\n"
-"\n"
-" -r/--range <range>\n"
-" (HTTP/FTP) Retrieve a byte range (i.e a partial\n"
-" document) from a HTTP/1.1 or FTP server. Ranges can\n"
-" be specified in a number of ways.\n"
-"\n"
-" 0-499 specifies the first 500 bytes\n"
-"\n"
-" 500-999 specifies the second 500 bytes\n"
-"\n"
-" -500 specifies the last 500 bytes\n"
-"\n"
-" 9500 specifies the bytes from offset 9500 and\n"
-" forward\n"
-"\n"
-" 0-0,-1 specifies the first and last byte\n"
-" only(*)(H)\n"
-"\n"
-" 500-700,600-799\n"
-" specifies 300 bytes from offset 500(H)\n"
-"\n"
-" 100-199,500-599\n"
-" specifies two separate 100 bytes\n"
-" ranges(*)(H)\n"
-"\n"
-" (*) = NOTE that this will cause the server to reply with a\n"
-" multipart response!\n"
-"\n"
-" You should also be aware that many HTTP/1.1 servers do not\n"
-" have this feature enabled, so that when you attempt to get\n"
-" a range, you'll instead get the whole document.\n"
-"\n"
-" FTP range downloads only support the simple syntax 'start-\n"
-" stop' (optionally with one of the numbers omitted). It\n"
-" depends on the non-RFC command SIZE.\n"
-"\n"
-" -s/--silent\n"
-" Silent mode. Don't show progress meter or error\n"
-" messages. Makes Curl mute.\n"
-"\n"
-" -S/--show-error\n"
-" When used with -s it makes curl show error message\n"
-" if it fails.\n"
-"\n"
-" -t/--upload\n"
-" Transfer the stdin data to the specified file. Curl\n"
-" will read everything from stdin until EOF and store\n"
-" with the supplied name. If this is used on a\n"
-" http(s) server, the PUT command will be used.\n"
-"\n"
-" -T/--upload-file <file>\n"
-" Like -t, but this transfers the specified local\n"
-" file. If there is no file part in the specified\n"
-" URL, Curl will append the local file name. NOTE\n"
-" that you must use a trailing / on the last direc-\n"
-" tory to really prove to Curl that there is no file\n"
-" name or curl will think that your last directory\n"
-" name is the remote file name to use. That will most\n"
-" likely cause the upload operation to fail. If this\n"
-" is used on a http(s) server, the PUT command will\n"
-" be used.\n"
-"\n"
-" -u/--user <user:password>\n"
-" Specify user and password to use when fetching. See\n"
-" README.curl for detailed examples of how to use\n"
-" this. If no password is specified, curl will ask\n"
-" for it interactively.\n"
-"\n"
-" -U/--proxy-user <user:password>\n"
-" Specify user and password to use for Proxy\n"
-" authentication. If no password is specified, curl\n"
-" will ask for it interactively.\n"
-"\n"
-" -v/--verbose\n"
-" Makes the fetching more verbose/talkative. Mostly\n"
-" usable for debugging. Lines starting with '>' means\n"
-" data sent by curl, '<' means data received by curl\n"
-" that is hidden in normal cases and lines starting\n"
-" with '*' means additional info provided by curl.\n"
-"\n"
-" -V/--version\n"
-" Displays the full version of curl, libcurl and\n"
-" other 3rd party libraries linked with the exe-\n"
-" cutable.\n"
-"\n"
-" -w/--write-out <format>\n"
-" Defines what to display after a completed and suc-\n"
-" cessful operation. The format is a string that may\n"
-" contain plain text mixed with any number of vari-\n"
-" ables. The string can be specified as \"string\", to\n"
-" get read from a particular file you specify it\n"
-" \"@filename\" and to tell curl to read the format\n"
-" from stdin you write \"@-\".\n"
-"\n"
-" The variables present in the output format will be\n"
-" substituted by the value or text that curl thinks\n"
-" fit, as described below. All variables are speci-\n"
-" fied like %{variable_name} and to output a normal %\n"
-" you just write them like %%. You can output a new-\n"
-" line by using \\n, a carrige return with \\r and a\n"
-" tab space with \\t.\n"
-"\n"
-" NOTE: The %-letter is a special letter in the\n"
-" win32-environment, where all occurrences of % must\n"
-" be doubled when using this option.\n"
-"\n"
-" Available variables are at this point:\n"
-"\n"
-" url_effective The URL that was fetched last. This\n"
-" is mostly meaningful if you've told\n"
-" curl to follow location: headers.\n"
-"\n"
-" http_code The numerical code that was found in\n"
-" the last retrieved HTTP(S) page.\n"
-"\n"
-" time_total The total time, in seconds, that the\n"
-" full operation lasted. The time will\n"
-" be displayed with millisecond reso-\n"
-" lution.\n"
-"\n"
-" time_namelookup\n"
-" The time, in seconds, it took from\n"
-" the start until the name resolving\n"
-" was completed.\n"
-" time_connect The time, in seconds, it took from\n"
-" the start until the connect to the\n"
-" remote host (or proxy) was com-\n"
-" pleted.\n"
-"\n"
-" time_pretransfer\n"
-" The time, in seconds, it took from\n"
-" the start until the file transfer is\n"
-" just about to begin. This includes\n"
-" all pre-transfer commands and nego-\n"
-" tiations that are specific to the\n"
-" particular protocol(s) involved.\n"
-"\n"
-" size_download The total amount of bytes that were\n"
-" downloaded.\n"
-"\n"
-" size_upload The total amount of bytes that were\n"
-" uploaded.\n"
-"\n"
-" speed_download The average download speed that curl\n"
-" measured for the complete download.\n"
-"\n"
-" speed_upload The average upload speed that curl\n"
-" measured for the complete download.\n"
-"\n"
-" -x/--proxy <proxyhost[:port]>\n"
-" Use specified proxy. If the port number is not\n"
-" specified, it is assumed at port 1080.\n"
-"\n"
-" -X/--request <command>\n"
-" (HTTP) Specifies a custom request to use when com-\n"
-" municating with the HTTP server. The specified\n"
-" request will be used instead of the standard GET.\n"
-" Read the HTTP 1.1 specification for details and\n"
-" explanations.\n"
-"\n"
-" (FTP) Specifies a custom FTP command to use instead\n"
-" of LIST when doing file lists with ftp.\n"
-"\n"
-" -y/--speed-time <time>\n"
-" If a download is slower than speed-limit bytes per\n"
-" second during a speed-time period, the download\n"
-" gets aborted. If speed-time is used, the default\n"
-" speed-limit will be 1 unless set with -y.\n"
-"\n"
-" -Y/--speed-limit <speed>\n"
-" If a download is slower than this given speed, in\n"
-" bytes per second, for speed-time seconds it gets\n"
-" aborted. speed-time is set with -Y and is 30 if not\n"
-" set.\n"
-"\n"
-" -z/--time-cond <date expression>\n"
-" (HTTP) Request to get a file that has been modified\n"
-" later than the given time and date, or one that has\n"
-" been modified before that time. The date expression\n"
-" can be all sorts of date strings or if it doesn't\n"
-" match any internal ones, it tries to get the time\n"
-" from a given file name instead! See the GNU date(1)\n"
-" man page for date expression details.\n"
-"\n"
-" Start the date expression with a dash (-) to make\n"
-" it request for a document that is older than the\n"
-" given date/time, default is a document that is\n"
-" newer than the specified date/time.\n"
-"\n"
-" -3/--sslv3\n"
-" (HTTPS) Forces curl to use SSL version 3 when nego-\n"
-" tiating with a remote SSL server.\n"
-"\n"
-" -2/--sslv2\n"
-" (HTTPS) Forces curl to use SSL version 2 when nego-\n"
-" tiating with a remote SSL server.\n"
-"\n"
-" -#/--progress-bar\n"
-" Make curl display progress information as a\n"
-" progress bar instead of the default statistics.\n"
-"\n"
-" --crlf (FTP) Convert LF to CRLF in upload. Useful for MVS\n"
-" (OS/390).\n"
-"\n"
-" --stderr <file>\n"
-" Redirect all writes to stderr to the specified file\n"
-" instead. If the file name is a plain '-', it is\n"
-" instead written to stdout. This option has no point\n"
-" when you're using a shell with decent redirecting\n"
-" capabilities.\n"
+" -a/--append\n"
+" (FTP) When used in a ftp upload, this will tell curl to\n"
+" append to the target file instead of overwriting it. If\n"
+" the file doesn't exist, it will be created.\n"
+"\n"
+" -A/--user-agent <agent string>\n"
+" (HTTP) Specify the User-Agent string to send to the\n"
+" HTTP server. Some badly done CGIs fail if its not set\n"
+" to \"Mozilla/4.0\". To encode blanks in the string, sur­\n"
+" round the string with single quote marks. This can\n"
+" also be set with the -H/--header flag of course.\n"
+" -b/--cookie <name=data>\n"
+" (HTTP) Pass the data to the HTTP server as a cookie. It\n"
+" is supposedly the data previously received from the\n"
+" server in a \"Set-Cookie:\" line. The data should be in\n"
+" the format \"NAME1=VALUE1; NAME2=VALUE2\".\n"
+"\n"
+" If no '=' letter is used in the line, it is treated as\n"
+" a filename to use to read previously stored cookie\n"
+" lines from, which should be used in this session if\n"
+" they match. Using this method also activates the\n"
+" \"cookie parser\" which will make curl record incoming\n"
+" cookies too, which may be handy if you're using this in\n"
+" combination with the -L/--location option. The file\n"
+" format of the file to read cookies from should be plain\n"
+" HTTP headers or the netscape cookie file format.\n"
+"\n"
+" NOTE that the file specified with -b/--cookie is only\n"
+" used as input. No cookies will be stored in the file.\n"
+" To store cookies, save the HTTP headers to a file using\n"
+" -D/--dump-header!\n"
+"\n"
+" -B/--ftp-ascii\n"
+" (FTP/LDAP) Use ASCII transfer when getting an FTP file\n"
+" or LDAP info. For FTP, this can also be enforced by\n"
+" using an URL that ends with \";type=A\".\n"
+"\n"
+" -c/--continue\n"
+" Continue/Resume a previous file transfer. This\n"
+" instructs curl to continue appending data on the file\n"
+" where it was previously left, possibly because of a\n"
+" broken connection to the server. There must be a named\n"
+" physical file to append to for this to work. Note:\n"
+" Upload resume is depening on a command named SIZE not\n"
+" always present in all ftp servers! Upload resume is for\n"
+" FTP only. HTTP resume is only possible with HTTP/1.1\n"
+" or later servers.\n"
+"\n"
+" -C/--continue-at <offset>\n"
+" Continue/Resume a previous file transfer at the given\n"
+" offset. The given offset is the exact number of bytes\n"
+" that will be skipped counted from the beginning of the\n"
+" source file before it is transfered to the destination.\n"
+" If used with uploads, the ftp server command SIZE will\n"
+" not be used by curl. Upload resume is for FTP only.\n"
+" HTTP resume is only possible with HTTP/1.1 or later\n"
+" servers.\n"
+"\n"
+" -d/--data <data>\n"
+" (HTTP) Sends the specified data in a POST request to\n"
+" the HTTP server. Note that the data is sent exactly as\n"
+" specified with no extra processing. The data is\n"
+" expected to be \"url-encoded\". This will cause curl to\n"
+" pass the data to the server using the content-type\n"
+" application/x-www-form-urlencoded. Compare to -F.\n"
+"\n"
+" If you start the data with the letter @, the rest\n"
+" should be a file name to read the data from, or - if\n"
+" you want curl to read the data from stdin. The con­\n"
+" tents of the file must already be url-encoded.\n"
+"\n"
+" -D/--dump-header <file>\n"
+" (HTTP/FTP) Write the HTTP headers to this file. Write\n"
+" the FTP file info to this file if -I/--head is used.\n"
+"\n"
+" This option is handy to use when you want to store the\n"
+" cookies that a HTTP site sends to you. The cookies\n"
+" could then be read in a second curl invoke by using the\n"
+" -b/--cookie option!\n"
+"\n"
+" -e/--referer <URL>\n"
+" (HTTP) Sends the \"Referer Page\" information to the HTTP\n"
+" server. Some badly done CGIs fail if it's not set. This\n"
+" can also be set with the -H/--header flag of course.\n"
+"\n"
+" -E/--cert <certificate[:password]>\n"
+" (HTTPS) Tells curl to use the specified certificate\n"
+" file when getting a file with HTTPS. The certificate\n"
+" must be in PEM format. If the optional password isn't\n"
+" specified, it will be queried for on the terminal. Note\n"
+" that this certificate is the private key and the pri­\n"
+" vate certificate concatenated!\n"
+"\n"
+" -f/--fail\n"
+" (HTTP) Fail silently (no output at all) on server\n"
+" errors. This is mostly done like this to better enable\n"
+" scripts etc to better deal with failed attempts. In\n"
+" normal cases when a HTTP server fails to deliver a doc­\n"
+" ument, it returns a HTML document stating so (which\n"
+" often also describes why and more). This flag will pre­\n"
+" vent curl from outputting that and fail silently\n"
+" instead.\n"
+"\n"
+" -F/--form <name=content>\n"
+" (HTTP) This lets curl emulate a filled in form in which\n"
+" a user has pressed the submit button. This causes curl\n"
+" to POST data using the content-type multipart/form-data\n"
+" according to RFC1867. This enables uploading of binary\n"
+" files etc. To force the 'content' part to be read from\n"
+" a file, prefix the file name with an @ sign. Example,\n"
+" to send your password file to the server, where 'pass­\n"
+" word' is the name of the form-field to which\n"
+" /etc/passwd will be the input:\n"
+"\n"
+" curl -F password=@/etc/passwd www.mypasswords.com\n"
+" To read the file's content from stdin insted of a file,\n"
+" use - where the file name should've been.\n"
+"\n"
+" -h/--help\n"
+" Usage help.\n"
+"\n"
+" -H/--header <header>\n"
+" (HTTP) Extra header to use when getting a web page. You\n"
+" may specify any number of extra headers. Note that if\n"
+" you should add a custom header that has the same name\n"
+" as one of the internal ones curl would use, your exter­\n"
+" nally set header will be used instead of the internal\n"
+" one. This allows you to make even trickier stuff than\n"
+" curl would normally do. You should not replace inter­\n"
+" nally set headers without knowing perfectly well what\n"
+" you're doing.\n"
+"\n"
+" -i/--include\n"
+" (HTTP) Include the HTTP-header in the output. The HTTP-\n"
+" header includes things like server-name, date of the\n"
+" document, HTTP-version and more...\n"
+"\n"
+" -I/--head\n"
+" (HTTP/FTP) Fetch the HTTP-header only! HTTP-servers\n"
+" feature the command HEAD which this uses to get nothing\n"
+" but the header of a document. When used on a FTP file,\n"
+" curl displays the file size only.\n"
+"\n"
+" -K/--config <config file>\n"
+" Specify which config file to read curl arguments from.\n"
+" The config file is a text file in which command line\n"
+" arguments can be written which then will be used as if\n"
+" they were written on the actual command line. If the\n"
+" first column of a config line is a '#' character, the\n"
+" rest of the line will be treated as a comment.\n"
+"\n"
+" Specify the filename as '-' to make curl read the file\n"
+" from stdin.\n"
+"\n"
+" -l/--list-only\n"
+" (FTP) When listing an FTP directory, this switch forces\n"
+" a name-only view. Especially useful if you want to\n"
+" machine-parse the contents of an FTP directory since\n"
+" the normal directory view doesn't use a standard look\n"
+" or format.\n"
+"\n"
+" -L/--location\n"
+" (HTTP/HTTPS) If the server reports that the requested\n"
+" page has a different location (indicated with the\n"
+" header line Location:) this flag will let curl attempt\n"
+" to reattempt the get on the new place. If used together\n"
+" with -i or -I, headers from all requested pages will be\n"
+" shown.\n"
+"\n"
+" -m/--max-time <seconds>\n"
+" Maximum time in seconds that you allow the whole opera­\n"
+" tion to take. This is useful for preventing your batch\n"
+" jobs from hanging for hours due to slow networks or\n"
+" links going down. This doesn't work properly in win32\n"
+" systems.\n"
+"\n"
+" -M/--manual\n"
+" Manual. Display the huge help text.\n"
+"\n"
+" -n/--netrc\n"
+" Makes curl scan the .netrc file in the user's home\n"
+" directory for login name and password. This is typi­\n"
+" cally used for ftp on unix. If used with http, curl\n"
+" will enable user authentication. See netrc(5) for\n"
+" details on the file format. Curl will not complain if\n"
+" that file hasn't the right permissions (it should not\n"
+" be world nor group readable). The environment variable\n"
+" \"HOME\" is used to find the home directory.\n"
+"\n"
+" A quick and very simple example of how to setup a\n"
+" .netrc to allow curl to ftp to the machine\n"
+" host.domain.com with user name\n"
+"\n"
+" machine host.domain.com login myself password secret\n"
+"\n"
+" -N/--no-buffer\n"
+" Disables the buffering of the output stream. In normal\n"
+" work situations, curl will use a standard buffered out­\n"
+" put stream that will have the effect that it will out­\n"
+" put the data in chunks, not necessarily exactly when\n"
+" the data arrives. Using this option will disable that\n"
+" buffering.\n"
+"\n"
+" -o/--output <file>\n"
+" Write output to <file> instead of stdout. If you are\n"
+" using {} or [] to fetch multiple documents, you can use\n"
+" '#' followed by a number in the <file> specifier. That\n"
+" variable will be replaced with the current string for\n"
+" the URL being fetched. Like in:\n"
+"\n"
+" curl http://{one,two}.site.com -o \"file_#1.txt\"\n"
+"\n"
+" or use several variables like:\n"
+"\n"
+" curl http://{site,host}.host[1-5].com -o \"#1_#2\"\n"
+"\n"
+" -O/--remote-name\n"
+" Write output to a local file named like the remote file\n"
+" we get. (Only the file part of the remote file is used,\n"
+" the path is cut off.)\n"
+"\n"
+" -P/--ftpport <address>\n"
+" (FTP) Reverses the initiator/listener roles when con­\n"
+" necting with ftp. This switch makes Curl use the PORT\n"
+" command instead of PASV. In practice, PORT tells the\n"
+" server to connect to the client's specified address and\n"
+" port, while PASV asks the server for an ip address and\n"
+" port to connect to. <address> should be one of:\n"
+"\n"
+" interface i.e \"eth0\" to specify which interface's IP\n"
+" address you want to use (Unix only)\n"
+"\n"
+" IP address i.e \"192.168.10.1\" to specify exact IP num­\n"
+" ber\n"
+"\n"
+" host name i.e \"my.host.domain\" to specify machine\n"
+"\n"
+" - (any single-letter string) to make it pick\n"
+" the machine's default\n"
+"\n"
+" -q If used as the first parameter on the command line, the\n"
+" $HOME/.curlrc file will not be read and used as a con­\n"
+" fig file.\n"
+"\n"
+" -Q/--quote <comand>\n"
+" (FTP) Send an arbitrary command to the remote FTP\n"
+" server, by using the QUOTE command of the server. Not\n"
+" all servers support this command, and the set of QUOTE\n"
+" commands are server specific! Quote commands are sent\n"
+" BEFORE the transfer is taking place. To make commands\n"
+" take place after a successful transfer, prefix them\n"
+" with a dash '-'. You may specify any amount of commands\n"
+" to be run before and after the transfer. If the server\n"
+" returns failure for one of the commands, the entire\n"
+" operation will be aborted.\n"
+"\n"
+" -r/--range <range>\n"
+" (HTTP/FTP) Retrieve a byte range (i.e a partial docu­\n"
+" ment) from a HTTP/1.1 or FTP server. Ranges can be\n"
+" specified in a number of ways.\n"
+"\n"
+" 0-499 specifies the first 500 bytes\n"
+"\n"
+" 500-999 specifies the second 500 bytes\n"
+"\n"
+" -500 specifies the last 500 bytes\n"
+"\n"
+" 9500 specifies the bytes from offset 9500 and for­\n"
+" ward\n"
+"\n"
+" 0-0,-1 specifies the first and last byte only(*)(H)\n"
+" 500-700,600-799\n"
+" specifies 300 bytes from offset 500(H)\n"
+"\n"
+" 100-199,500-599\n"
+" specifies two separate 100 bytes ranges(*)(H)\n"
+"\n"
+" (*) = NOTE that this will cause the server to reply with a\n"
+" multipart response!\n"
+"\n"
+" You should also be aware that many HTTP/1.1 servers do not\n"
+" have this feature enabled, so that when you attempt to get a\n"
+" range, you'll instead get the whole document.\n"
+"\n"
+" FTP range downloads only support the simple syntax 'start-\n"
+" stop' (optionally with one of the numbers omitted). It\n"
+" depends on the non-RFC command SIZE.\n"
+"\n"
+" -s/--silent\n"
+" Silent mode. Don't show progress meter or error mes­\n"
+" sages. Makes Curl mute.\n"
+"\n"
+" -S/--show-error\n"
+" When used with -s it makes curl show error message if\n"
+" it fails.\n"
+"\n"
+" -t/--upload\n"
+" Transfer the stdin data to the specified file. Curl\n"
+" will read everything from stdin until EOF and store\n"
+" with the supplied name. If this is used on a http(s)\n"
+" server, the PUT command will be used.\n"
+"\n"
+" -T/--upload-file <file>\n"
+" Like -t, but this transfers the specified local file.\n"
+" If there is no file part in the specified URL, Curl\n"
+" will append the local file name. NOTE that you must use\n"
+" a trailing / on the last directory to really prove to\n"
+" Curl that there is no file name or curl will think that\n"
+" your last directory name is the remote file name to\n"
+" use. That will most likely cause the upload operation\n"
+" to fail. If this is used on a http(s) server, the PUT\n"
+" command will be used.\n"
+"\n"
+" -u/--user <user:password>\n"
+" Specify user and password to use when fetching. See\n"
+" README.curl for detailed examples of how to use this.\n"
+" If no password is specified, curl will ask for it\n"
+" interactively.\n"
+"\n"
+" -U/--proxy-user <user:password>\n"
+" Specify user and password to use for Proxy authentica­\n"
+" tion. If no password is specified, curl will ask for it\n"
+" interactively.\n"
+" -v/--verbose\n"
+" Makes the fetching more verbose/talkative. Mostly\n"
+" usable for debugging. Lines starting with '>' means\n"
+" data sent by curl, '<' means data received by curl that\n"
+" is hidden in normal cases and lines starting with '*'\n"
+" means additional info provided by curl.\n"
+"\n"
+" -V/--version\n"
+" Displays the full version of curl, libcurl and other\n"
+" 3rd party libraries linked with the executable.\n"
+"\n"
+" -w/--write-out <format>\n"
+" Defines what to display after a completed and success­\n"
+" ful operation. The format is a string that may contain\n"
+" plain text mixed with any number of variables. The\n"
+" string can be specified as \"string\", to get read from a\n"
+" particular file you specify it \"@filename\" and to tell\n"
+" curl to read the format from stdin you write \"@-\".\n"
+"\n"
+" The variables present in the output format will be sub­\n"
+" stituted by the value or text that curl thinks fit, as\n"
+" described below. All variables are specified like\n"
+" %{variable_name} and to output a normal % you just\n"
+" write them like %%. You can output a newline by using\n"
+" \\n, a carrige return with \\r and a tab space with \\t.\n"
+"\n"
+" NOTE: The %-letter is a special letter in the\n"
+" win32-environment, where all occurrences of % must be\n"
+" doubled when using this option.\n"
+"\n"
+" Available variables are at this point:\n"
+"\n"
+" url_effective The URL that was fetched last. This is\n"
+" mostly meaningful if you've told curl to\n"
+" follow location: headers.\n"
+"\n"
+" http_code The numerical code that was found in the\n"
+" last retrieved HTTP(S) page.\n"
+"\n"
+" time_total The total time, in seconds, that the\n"
+" full operation lasted. The time will be\n"
+" displayed with millisecond resolution.\n"
+"\n"
+" time_namelookup\n"
+" The time, in seconds, it took from the\n"
+" start until the name resolving was com­\n"
+" pleted.\n"
+"\n"
+" time_connect The time, in seconds, it took from the\n"
+" start until the connect to the remote\n"
+" host (or proxy) was completed.\n"
+" time_pretransfer\n"
+" The time, in seconds, it took from the\n"
+" start until the file transfer is just\n"
+" about to begin. This includes all pre-\n"
+" transfer commands and negotiations that\n"
+" are specific to the particular proto­\n"
+" col(s) involved.\n"
+"\n"
+" size_download The total amount of bytes that were\n"
+" downloaded.\n"
+"\n"
+" size_upload The total amount of bytes that were\n"
+" uploaded.\n"
+"\n"
+" speed_download The average download speed that curl\n"
+" measured for the complete download.\n"
+"\n"
+" speed_upload The average upload speed that curl mea­\n"
+" sured for the complete download.\n"
+"\n"
+" -x/--proxy <proxyhost[:port]>\n"
+" Use specified proxy. If the port number is not speci­\n"
+" fied, it is assumed at port 1080.\n"
+"\n"
+" -X/--request <command>\n"
+" (HTTP) Specifies a custom request to use when communi­\n"
+" cating with the HTTP server. The specified request\n"
+" will be used instead of the standard GET. Read the HTTP\n"
+" 1.1 specification for details and explanations.\n"
+"\n"
+" (FTP) Specifies a custom FTP command to use instead of\n"
+" LIST when doing file lists with ftp.\n"
+"\n"
+" -y/--speed-time <time>\n"
+" If a download is slower than speed-limit bytes per sec­\n"
+" ond during a speed-time period, the download gets\n"
+" aborted. If speed-time is used, the default speed-limit\n"
+" will be 1 unless set with -y.\n"
+"\n"
+" -Y/--speed-limit <speed>\n"
+" If a download is slower than this given speed, in bytes\n"
+" per second, for speed-time seconds it gets aborted.\n"
+" speed-time is set with -Y and is 30 if not set.\n"
+"\n"
+" -z/--time-cond <date expression>\n"
+" (HTTP) Request to get a file that has been modified\n"
+" later than the given time and date, or one that has\n"
+" been modified before that time. The date expression can\n"
+" be all sorts of date strings or if it doesn't match any\n"
+" internal ones, it tries to get the time from a given\n"
+" file name instead! See the GNU date(1) man page for\n"
+" date expression details.\n"
+" Start the date expression with a dash (-) to make it\n"
+" request for a document that is older than the given\n"
+" date/time, default is a document that is newer than the\n"
+" specified date/time.\n"
+"\n"
+" -3/--sslv3\n"
+" (HTTPS) Forces curl to use SSL version 3 when negotiat­\n"
+" ing with a remote SSL server.\n"
+"\n"
+" -2/--sslv2\n"
+" (HTTPS) Forces curl to use SSL version 2 when negotiat­\n"
+" ing with a remote SSL server.\n"
+"\n"
+" -#/--progress-bar\n"
+" Make curl display progress information as a progress\n"
+" bar instead of the default statistics.\n"
+"\n"
+" --crlf\n"
+" (FTP) Convert LF to CRLF in upload. Useful for MVS\n"
+" (OS/390).\n"
+"\n"
+" --stderr <file>\n"
+" Redirect all writes to stderr to the specified file\n"
+" instead. If the file name is a plain '-', it is instead\n"
+" written to stdout. This option has no point when you're\n"
+" using a shell with decent redirecting capabilities.\n"
"\n"
"FILES\n"
-" ~/.curlrc\n"
-" Default config file.\n"
+" ~/.curlrc\n"
+" Default config file.\n"
"\n"
"ENVIRONMENT\n"
-" HTTP_PROXY [protocol://]<host>[:port]\n"
-" Sets proxy server to use for HTTP.\n"
+" HTTP_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for HTTP.\n"
"\n"
-" HTTPS_PROXY [protocol://]<host>[:port]\n"
-" Sets proxy server to use for HTTPS.\n"
+" HTTPS_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for HTTPS.\n"
"\n"
-" FTP_PROXY [protocol://]<host>[:port]\n"
-" Sets proxy server to use for FTP.\n"
+" FTP_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for FTP.\n"
"\n"
-" GOPHER_PROXY [protocol://]<host>[:port]\n"
-" Sets proxy server to use for GOPHER.\n"
+" GOPHER_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for GOPHER.\n"
"\n"
-" ALL_PROXY [protocol://]<host>[:port]\n"
-" Sets proxy server to use if no protocol-specific\n"
-" proxy is set.\n"
-" NO_PROXY <comma-separated list of hosts>\n"
-" list of host names that shouldn't go through any\n"
-" proxy. If set to a asterisk '*' only, it matches\n"
-" all hosts.\n"
+" ALL_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use if no protocol-specific proxy\n"
+" is set.\n"
"\n"
-" COLUMNS <integer>\n"
-" The width of the terminal. This variable only\n"
-" affects curl when the --progress-bar option is\n"
-" used.\n"
+" NO_PROXY <comma-separated list of hosts>\n"
+" list of host names that shouldn't go through any proxy.\n"
+" If set to a asterisk '*' only, it matches all hosts.\n"
+" COLUMNS <integer>\n"
+" The width of the terminal. This variable only affects\n"
+" curl when the --progress-bar option is used.\n"
"\n"
"EXIT CODES\n"
-" There exists a bunch of different error codes and their\n"
-" corresponding error messages that may appear during bad\n"
-" conditions. At the time of this writing, the exit codes\n"
-" are:\n"
+" There exists a bunch of different error codes and their cor­\n"
+" responding error messages that may appear during bad condi­\n"
+" tions. At the time of this writing, the exit codes are:\n"
"\n"
-" 1 Unsupported protocol. This build of curl has no\n"
-" support for this protocol.\n"
+" 1 Unsupported protocol. This build of curl has no support\n"
+" for this protocol.\n"
"\n"
-" 2 Failed to initialize.\n"
+" 2 Failed to initialize.\n"
"\n"
-" 3 URL malformat. The syntax was not correct.\n"
+" 3 URL malformat. The syntax was not correct.\n"
"\n"
-" 4 URL user malformatted. The user-part of the URL\n"
-" syntax was not correct.\n"
+" 4 URL user malformatted. The user-part of the URL syntax\n"
+" was not correct.\n"
"\n"
-" 5 Couldn't resolve proxy. The given proxy host could\n"
-" not be resolved.\n"
+" 5 Couldn't resolve proxy. The given proxy host could not\n"
+" be resolved.\n"
"\n"
-" 6 Couldn't resolve host. The given remote host was\n"
-" not resolved.\n"
+" 6 Couldn't resolve host. The given remote host was not\n"
+" resolved.\n"
"\n"
-" 7 Failed to connect to host.\n"
+" 7 Failed to connect to host.\n"
"\n"
-" 8 FTP weird server reply. The server sent data curl\n"
-" couldn't parse.\n"
+" 8 FTP weird server reply. The server sent data curl\n"
+" couldn't parse.\n"
"\n"
-" 9 FTP access denied. The server denied login.\n"
+" 9 FTP access denied. The server denied login.\n"
"\n"
-" 10 FTP user/password incorrect. Either one or both\n"
-" were not accepted by the server.\n"
+" 10 FTP user/password incorrect. Either one or both were\n"
+" not accepted by the server.\n"
"\n"
-" 11 FTP weird PASS reply. Curl couldn't parse the reply\n"
-" sent to the PASS request.\n"
+" 11 FTP weird PASS reply. Curl couldn't parse the reply\n"
+" sent to the PASS request.\n"
"\n"
-" 12 FTP weird USER reply. Curl couldn't parse the reply\n"
-" sent to the USER request.\n"
+" 12 FTP weird USER reply. Curl couldn't parse the reply\n"
+" sent to the USER request.\n"
"\n"
-" 13 FTP weird PASV reply, Curl couldn't parse the reply\n"
-" sent to the PASV request.\n"
+" 13 FTP weird PASV reply, Curl couldn't parse the reply\n"
+" sent to the PASV request.\n"
"\n"
-" 14 FTP weird 227 formay. Curl couldn't parse the\n"
-" 227-line the server sent.\n"
-" 15 FTP can't get host. Couldn't resolve the host IP we\n"
-" got in the 227-line.\n"
+" 14 FTP weird 227 formay. Curl couldn't parse the 227-line\n"
+" the server sent.\n"
"\n"
-" 16 FTP can't reconnect. Couldn't connect to the host\n"
-" we got in the 227-line.\n"
+" 15 FTP can't get host. Couldn't resolve the host IP we got\n"
+" in the 227-line.\n"
"\n"
-" 17 FTP couldn't set binary. Couldn't change transfer\n"
-" method to binary.\n"
+" 16 FTP can't reconnect. Couldn't connect to the host we\n"
+" got in the 227-line.\n"
+" 17 FTP couldn't set binary. Couldn't change transfer\n"
+" method to binary.\n"
"\n"
-" 18 Partial file. Only a part of the file was trans-\n"
-" fered.\n"
+" 18 Partial file. Only a part of the file was transfered.\n"
"\n"
-" 19 FTP couldn't RETR file. The RETR command failed.\n"
+" 19 FTP couldn't RETR file. The RETR command failed.\n"
"\n"
-" 20 FTP write error. The transfer was reported bad by\n"
-" the server.\n"
+" 20 FTP write error. The transfer was reported bad by the\n"
+" server.\n"
"\n"
-" 21 FTP quote error. A quote command returned error\n"
-" from the server.\n"
+" 21 FTP quote error. A quote command returned error from\n"
+" the server.\n"
"\n"
-" 22 HTTP not found. The requested page was not found.\n"
-" This return code only appears if --fail is used.\n"
+" 22 HTTP not found. The requested page was not found. This\n"
+" return code only appears if --fail is used.\n"
"\n"
-" 23 Write error. Curl couldn't write data to a local\n"
-" filesystem or similar.\n"
+" 23 Write error. Curl couldn't write data to a local\n"
+" filesystem or similar.\n"
"\n"
-" 24 Malformat user. User name badly specified.\n"
+" 24 Malformat user. User name badly specified.\n"
"\n"
-" 25 FTP couldn't STOR file. The server denied the STOR\n"
-" operation.\n"
+" 25 FTP couldn't STOR file. The server denied the STOR\n"
+" operation.\n"
"\n"
-" 26 Read error. Various reading problems.\n"
+" 26 Read error. Various reading problems.\n"
"\n"
-" 27 Out of memory. A memory allocation request failed.\n"
+" 27 Out of memory. A memory allocation request failed.\n"
"\n"
-" 28 Operation timeout. The specified time-out period\n"
-" was reached according to the conditions.\n"
+" 28 Operation timeout. The specified time-out period was\n"
+" reached according to the conditions.\n"
"\n"
-" 29 FTP couldn't set ASCII. The server returned an\n"
-" unknown reply.\n"
+" 29 FTP couldn't set ASCII. The server returned an unknown\n"
+" reply.\n"
"\n"
-" 30 FTP PORT failed. The PORT command failed.\n"
+" 30 FTP PORT failed. The PORT command failed.\n"
"\n"
-" 31 FTP couldn't use REST. The REST command failed.\n"
+" 31 FTP couldn't use REST. The REST command failed.\n"
"\n"
-" 32 FTP couldn't use SIZE. The SIZE command failed. The\n"
-" command is an extension to the original FTP spec\n"
-" RFC 959.\n"
+" 32 FTP couldn't use SIZE. The SIZE command failed. The\n"
+" command is an extension to the original FTP spec RFC\n"
+" 959.\n"
"\n"
-" 33 HTTP range error. The range \"command\" didn't work.\n"
+" 33 HTTP range error. The range \"command\" didn't work.\n"
"\n"
-" 34 HTTP post error. Internal post-request generation\n"
-" error.\n"
-" 35 SSL connect error. The SSL handshaking failed.\n"
+" 34 HTTP post error. Internal post-request generation\n"
+" error.\n"
"\n"
-" 36 FTP bad download resume. Couldn't continue an ear-\n"
-" lier aborted download.\n"
+" 35 SSL connect error. The SSL handshaking failed.\n"
"\n"
-" 37 FILE couldn't read file. Failed to open the file.\n"
-" Permissions?\n"
+" 36 FTP bad download resume. Couldn't continue an earlier\n"
+" aborted download.\n"
+" 37 FILE couldn't read file. Failed to open the file. Per­\n"
+" missions?\n"
"\n"
-" 38 LDAP cannot bind. LDAP bind operation failed.\n"
+" 38 LDAP cannot bind. LDAP bind operation failed.\n"
"\n"
-" 39 LDAP search failed.\n"
+" 39 LDAP search failed.\n"
"\n"
-" 40 Library not found. The LDAP library was not found.\n"
+" 40 Library not found. The LDAP library was not found.\n"
"\n"
-" 41 Function not found. A required LDAP function was\n"
-" not found.\n"
+" 41 Function not found. A required LDAP function was not\n"
+" found.\n"
"\n"
-" XX There will appear more error codes here in future\n"
-" releases. The existing ones are meant to never\n"
-" change.\n"
+" XX There will appear more error codes here in future\n"
+" releases. The existing ones are meant to never change.\n"
"\n"
"BUGS\n"
-" If you do find any (or have other suggestions), mail\n"
-" Daniel Stenberg <Daniel.Stenberg@haxx.nu>.\n"
+" If you do find any (or have other suggestions), mail Daniel\n"
+" Stenberg <Daniel.Stenberg@haxx.nu>.\n"
"\n"
"AUTHORS / CONTRIBUTORS\n"
-" - Daniel Stenberg <Daniel.Stenberg@haxx.nu>\n"
-" - Rafael Sagula <sagula@inf.ufrgs.br>\n"
-" - Sampo Kellomaki <sampo@iki.fi>\n"
-" - Linas Vepstas <linas@linas.org>\n"
-" - Bjorn Reese <breese@mail1.stofanet.dk>\n"
-" - Johan Anderson <johan@homemail.com>\n"
-" - Kjell Ericson <Kjell.Ericson@haxx,nu>\n"
-" - Troy Engel <tengel@sonic.net>\n"
-" - Ryan Nelson <ryan@inch.com>\n"
-" - Bjorn Stenberg <Bjorn.Stenberg@haxx.nu>\n"
-" - Angus Mackay <amackay@gus.ml.org>\n"
-" - Eric Young <eay@cryptsoft.com>\n"
-" - Simon Dick <simond@totally.irrelevant.org>\n"
-" - Oren Tirosh <oren@monty.hishome.net>\n"
-" - Steven G. Johnson <stevenj@alum.mit.edu>\n"
-" - Gilbert Ramirez Jr. <gram@verdict.uthscsa.edu>\n"
-" - Andr's Garc'a <ornalux@redestb.es>\n"
-" - Douglas E. Wegscheid <wegscd@whirlpool.com>\n"
-" - Mark Butler <butlerm@xmission.com>\n"
-" - Eric Thelin <eric@generation-i.com>\n"
-" - Marc Boucher <marc@mbsi.ca>\n"
-" - Greg Onufer <Greg.Onufer@Eng.Sun.COM>\n"
-" - Doug Kaufman <dkaufman@rahul.net>\n"
-" - David Eriksson <david@2good.com>\n"
-" - Ralph Beckmann <rabe@uni-paderborn.de>\n"
-" - T. Yamada <tai@imasy.or.jp>\n"
-" - Lars J. Aas <larsa@sim.no>\n"
-" - J\"rn Hartroth <Joern.Hartroth@telekom.de>\n"
-" - Matthew Clarke <clamat@van.maves.ca>\n"
-" - Linus Nielsen <Linus.Nielsen@haxx.nu>\n"
-" - Felix von Leitner <felix@convergence.de>\n"
-" - Dan Zitter <dzitter@zitter.net>\n"
-" - Jongki Suwandi <Jongki.Suwandi@eng.sun.com>\n"
-" - Chris Maltby <chris@aurema.com>\n"
-" - Ron Zapp <rzapper@yahoo.com>\n"
-" - Paul Marquis <pmarquis@iname.com>\n"
-" - Ellis Pritchard <ellis@citria.com>\n"
-" - Damien Adant <dams@usa.net>\n"
-" - Chris <cbayliss@csc.come>\n"
-" - Marco G. Salvagno <mgs@whiz.cjb.net>\n"
+" - Daniel Stenberg <Daniel.Stenberg@haxx.nu>\n"
+" - Rafael Sagula <sagula@inf.ufrgs.br>\n"
+" - Sampo Kellomaki <sampo@iki.fi>\n"
+" - Linas Vepstas <linas@linas.org>\n"
+" - Bjorn Reese <breese@mail1.stofanet.dk>\n"
+" - Johan Anderson <johan@homemail.com>\n"
+" - Kjell Ericson <Kjell.Ericson@haxx,nu>\n"
+" - Troy Engel <tengel@sonic.net>\n"
+" - Ryan Nelson <ryan@inch.com>\n"
+" - Bjorn Stenberg <Bjorn.Stenberg@haxx.nu>\n"
+" - Angus Mackay <amackay@gus.ml.org>\n"
+" - Eric Young <eay@cryptsoft.com>\n"
+" - Simon Dick <simond@totally.irrelevant.org>\n"
+" - Oren Tirosh <oren@monty.hishome.net>\n"
+" - Steven G. Johnson <stevenj@alum.mit.edu>\n"
+" - Gilbert Ramirez Jr. <gram@verdict.uthscsa.edu>\n"
+" - Andrés García <ornalux@redestb.es>\n"
+" - Douglas E. Wegscheid <wegscd@whirlpool.com>\n"
+" - Mark Butler <butlerm@xmission.com>\n"
+" - Eric Thelin <eric@generation-i.com>\n"
+" - Marc Boucher <marc@mbsi.ca>\n"
+" - Greg Onufer <Greg.Onufer@Eng.Sun.COM>\n"
+" - Doug Kaufman <dkaufman@rahul.net>\n"
+" - David Eriksson <david@2good.com>\n"
+" - Ralph Beckmann <rabe@uni-paderborn.de>\n"
+" - T. Yamada <tai@imasy.or.jp>\n"
+" - Lars J. Aas <larsa@sim.no>\n"
+" - Jörn Hartroth <Joern.Hartroth@telekom.de>\n"
+" - Matthew Clarke <clamat@van.maves.ca>\n"
+" - Linus Nielsen <Linus.Nielsen@haxx.nu>\n"
+" - Felix von Leitner <felix@convergence.de>\n"
+" - Dan Zitter <dzitter@zitter.net>\n"
+" - Jongki Suwandi <Jongki.Suwandi@eng.sun.com>\n"
+" - Chris Maltby <chris@aurema.com>\n"
+" - Ron Zapp <rzapper@yahoo.com>\n"
+" - Paul Marquis <pmarquis@iname.com>\n"
+" - Ellis Pritchard <ellis@citria.com>\n"
+" - Damien Adant <dams@usa.net>\n"
+" - Chris <cbayliss@csc.come>\n"
+" - Marco G. Salvagno <mgs@whiz.cjb.net>\n"
"\n"
"WWW\n"
-" http://curl.haxx.nu\n"
+" http://curl.haxx.nu\n"
"\n"
"FTP\n"
-" ftp://ftp.sunet.se/pub/www/utilities/curl/\n"
+" ftp://ftp.sunet.se/pub/www/utilities/curl/\n"
"\n"
"SEE ALSO\n"
-" ftp(1), wget(1), snarf(1)\n"
+" ftp(1), wget(1), snarf(1)\n"
"\n"
"LATEST VERSION\n"
"\n"
@@ -925,6 +894,41 @@ puts (
" curl -d \"name=Rafael%20Sagula&phone=3320780\" \\\n"
" http://www.where.com/guest.cgi\n"
"\n"
+" How to post a form with curl, lesson #1:\n"
+"\n"
+" Dig out all the <input> tags in the form that you want to fill in. (There's\n"
+" a perl program called formfind.pl on the curl site that helps with this).\n"
+"\n"
+" If there's a \"normal\" post, you use -d to post. -d takes a full \"post\n"
+" string\", which is in the format\n"
+"\n"
+" <variable1>=<data1>&<variable2>=<data2>&...\n"
+"\n"
+" The 'variable' names are the names set with \"name=\" in the <input> tags, and\n"
+" the data is the contents you want to fill in for the inputs. The data *must*\n"
+" be properly URL encoded. That means you replace space with + and that you\n"
+" write weird letters with %XX where XX is the hexadecimal representation of\n"
+" the letter's ASCII code.\n"
+"\n"
+" Example:\n"
+"\n"
+" (page located at http://www.formpost.com/getthis/\n"
+"\n"
+" <form action=\"post.cgi\" method=\"post\">\n"
+" <input name=user size=10>\n"
+" <input name=pass type=password size=10>\n"
+" <input name=id type=hidden value=\"blablabla\">\n"
+" <input name=ding value=\"submit\">\n"
+" </form>\n"
+"\n"
+" We want to enter user 'foobar' with password '12345'.\n"
+"\n"
+" To post to this, you enter a curl command line like:\n"
+"\n"
+" curl -d \"user=foobar&pass=12345&id=blablabla&dig=submit\" (continues)\n"
+" http://www.formpost.com/getthis/post.cgi\n"
+"\n"
+"\n"
" While -d uses the application/x-www-form-urlencoded mime-type, generally\n"
" understood by CGI's and similar, curl also supports the more capable\n"
" multipart/form-data type. This latter type supports things like file upload.\n"
diff --git a/src/main.c b/src/main.c
index 523a38ec4..d629de9e7 100644
--- a/src/main.c
+++ b/src/main.c
@@ -46,6 +46,8 @@
#include <ctype.h>
#include <curl/curl.h>
+#include <curl/types.h> /* new for v7 */
+#include <curl/easy.h> /* new for v7 */
#include <curl/mprintf.h>
#include "../lib/getdate.h"
@@ -71,6 +73,25 @@
#include <unistd.h>
#endif
+/* Just a set of bits */
+#define CONF_DEFAULT 0
+#define CONF_VERBOSE (1<<5) /* talk a lot */
+#define CONF_HEADER (1<<8) /* throw the header out too */
+#define CONF_NOPROGRESS (1<<10) /* shut off the progress meter */
+#define CONF_NOBODY (1<<11) /* use HEAD to get http document */
+#define CONF_FAILONERROR (1<<12) /* no output on http error codes >= 300 */
+#define CONF_UPLOAD (1<<14) /* this is an upload */
+#define CONF_POST (1<<15) /* HTTP POST method */
+#define CONF_FTPLISTONLY (1<<16) /* Use NLST when listing ftp dir */
+#define CONF_FTPAPPEND (1<<20) /* Append instead of overwrite on upload! */
+#define CONF_NETRC (1<<22) /* read user+password from .netrc */
+#define CONF_FOLLOWLOCATION (1<<23) /* use Location: Luke! */
+#define CONF_FTPASCII (1<<24) /* use TYPE A for transfer */
+#define CONF_HTTPPOST (1<<25) /* multipart/form-data HTTP POST */
+#define CONF_PUT (1<<27) /* PUT the input file */
+#define CONF_MUTE (1<<28) /* force NOPROGRESS */
+
+
#ifndef HAVE_STRDUP
/* Ultrix doesn't have strdup(), so make a quick clone: */
char *strdup(char *str)
@@ -113,7 +134,7 @@ static UrgError win32_init(void)
if (err != 0)
/* Tell the user that we couldn't find a useable */
/* winsock.dll. */
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
/* Confirm that the Windows Sockets DLL supports 1.1.*/
/* Note that if the DLL supports versions greater */
@@ -127,13 +148,13 @@ static UrgError win32_init(void)
/* winsock.dll. */
WSACleanup();
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
- return URG_OK;
+ return CURLE_OK;
}
/* The Windows Sockets DLL is acceptable. Proceed. */
#else
-static UrgError win32_init(void) { return URG_OK; }
+static CURLcode win32_init(void) { return CURLE_OK; }
#define win32_cleanup()
#endif
@@ -143,7 +164,7 @@ static UrgError win32_init(void) { return URG_OK; }
* _any_ libcurl usage. If this fails, *NO* libcurl functions may be
* used, or havoc may be the result.
*/
-UrgError main_init(void)
+CURLcode main_init(void)
{
return win32_init();
}
@@ -297,7 +318,10 @@ static void GetStr(char **string,
{
if(*string)
free(*string);
- *string = strdup(value);
+ if(value && *value)
+ *string = strdup(value);
+ else
+ *string = NULL;
}
static char *file2string(FILE *file)
@@ -420,7 +444,7 @@ static int getparameter(char *flag, /* f or -long-flag */
if(parse) {
/* this is the second match, we can't continue! */
helpf("option --%s is ambiguous\n", &flag[1]);
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
parse = aliases[j].letter;
hit = j;
@@ -428,7 +452,7 @@ static int getparameter(char *flag, /* f or -long-flag */
}
if(hit < 0) {
helpf("unknown option -%s.\n", flag);
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
}
else {
@@ -454,18 +478,18 @@ static int getparameter(char *flag, /* f or -long-flag */
}
if(hit < 0) {
helpf("unknown option -%c.\n", letter);
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
}
if(hit < 0) {
helpf("unknown option -%c.\n", letter);
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
if(!nextarg && aliases[hit].extraparam) {
helpf("option -%s/--%s requires an extra argument!\n",
aliases[hit].letter,
aliases[hit].lname);
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
else if(nextarg && aliases[hit].extraparam)
*usedarg = TRUE; /* mark it as used */
@@ -491,7 +515,7 @@ static int getparameter(char *flag, /* f or -long-flag */
break;
}
now=time(NULL);
- config->condtime=get_date(nextarg, &now);
+ config->condtime=curl_getdate(nextarg, &now);
if(-1 == config->condtime) {
/* now let's see if it is a file name to get the time from instead! */
struct stat statbuf;
@@ -586,7 +610,6 @@ static int getparameter(char *flag, /* f or -long-flag */
break;
case 'e':
GetStr(&config->referer, nextarg);
- config->conf |= CONF_REFERER;
break;
case 'E':
{
@@ -610,13 +633,12 @@ static int getparameter(char *flag, /* f or -long-flag */
if(curl_FormParse(nextarg,
&config->httppost,
&config->last_post))
- return URG_FAILED_INIT;
- config->conf |= CONF_HTTPPOST; /* no toggle, OR! */
+ return CURLE_FAILED_INIT;
break;
case 'h': /* h for help */
help();
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
case 'H':
head = (struct HttpHeader *)malloc(sizeof(struct HttpHeader));
if(head) {
@@ -659,7 +681,7 @@ static int getparameter(char *flag, /* f or -long-flag */
break;
case 'M': /* M for manual, huge help */
hugehelp();
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
case 'n':
/* pick info from .netrc, if this is used for http, curl will
automatically enfore user+password with the request */
@@ -683,7 +705,6 @@ static int getparameter(char *flag, /* f or -long-flag */
this will make us try to get the "default" address.
NOTE: this is a changed behaviour since the released 4.1!
*/
- config->conf |= CONF_FTPPORT;
GetStr(&config->ftpport, nextarg);
break;
#if 0
@@ -712,7 +733,6 @@ static int getparameter(char *flag, /* f or -long-flag */
case 'r':
/* byte range requested */
GetStr(&config->range, nextarg);
- config->conf |= CONF_RANGE;
break;
case 's':
/* don't show progress meter, don't show errors : */
@@ -735,19 +755,17 @@ static int getparameter(char *flag, /* f or -long-flag */
case 'u':
/* user:password */
GetStr(&config->userpwd, nextarg);
- config->conf |= CONF_USERPWD;
break;
case 'U':
/* Proxy user:password */
GetStr(&config->proxyuserpwd, nextarg);
- config->conf |= CONF_PROXYUSERPWD;
break;
case 'v':
config->conf ^= CONF_VERBOSE; /* talk a lot */
break;
case 'V':
printf(CURL_ID "%s\n", curl_version());
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
case 'w':
/* get the output string */
if('@' == *nextarg) {
@@ -768,14 +786,7 @@ static int getparameter(char *flag, /* f or -long-flag */
break;
case 'x':
/* proxy */
- if(!*nextarg) {
- /* disable proxy when no proxy is given */
- config->conf &= ~CONF_PROXY;
- }
- else {
- config->conf |= CONF_PROXY;
- GetStr(&config->proxy, nextarg);
- }
+ GetStr(&config->proxy, nextarg);
break;
case 'X':
/* HTTP request */
@@ -799,13 +810,13 @@ static int getparameter(char *flag, /* f or -long-flag */
helpf("Unknown option '%c'\n", letter);
else
helpf("Unknown option\n"); /* short help blurb */
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
hit = -1;
} while(*++parse && !*usedarg);
- return URG_OK;
+ return CURLE_OK;
}
@@ -826,7 +837,7 @@ static int parseconfig(char *filename,
char *home = curl_GetEnv("HOME"); /* portable environment reader */
if(!home || (strlen(home)>(sizeof(filebuffer)-strlen(CURLRC))))
- return URG_OK;
+ return CURLE_OK;
sprintf(filebuffer, "%s%s%s", home, DIR_CHAR, CURLRC);
@@ -894,7 +905,7 @@ static int parseconfig(char *filename,
if(file != stdin)
fclose(file);
}
- return URG_OK;
+ return CURLE_OK;
}
struct OutStruct {
@@ -944,7 +955,8 @@ int main(int argc, char *argv[])
int infilesize=-1; /* -1 means unknown */
bool stillflags=TRUE;
- int res=URG_OK;
+ CURL *curl;
+ int res=CURLE_OK;
int i;
outs.stream = stdout;
@@ -981,7 +993,7 @@ int main(int argc, char *argv[])
if ((argc < 2) && !config.url) {
helpf(NULL);
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
/* Parse options */
@@ -1014,7 +1026,7 @@ int main(int argc, char *argv[])
else {
if(url) {
helpf("only one URL is supported!\n");
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
url = argv[i];
}
@@ -1027,7 +1039,7 @@ int main(int argc, char *argv[])
if(!url) {
helpf("no URL specified!\n");
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
#if 0
fprintf(stderr, "URL: %s PROXY: %s\n", url, config.proxy?config.proxy:"none");
@@ -1036,7 +1048,7 @@ int main(int argc, char *argv[])
/* expand '{...}' and '[...]' expressions and return total number of URLs
in pattern set */
res = glob_url(&urls, url, &urlnum);
- if(res != URG_OK)
+ if(res != CURLE_OK)
return res;
outfiles = config.outfile; /* save outfile pattern befor expansion */
@@ -1058,7 +1070,7 @@ int main(int argc, char *argv[])
if(config.outfile && config.infile) {
helpf("you can't both upload and download!\n");
- return URG_FAILED_INIT;
+ return CURLE_FAILED_INIT;
}
if (config.outfile || config.remotefile) {
@@ -1077,7 +1089,7 @@ int main(int argc, char *argv[])
config.outfile = strrchr(config.outfile, '/');
if(!config.outfile || !strlen(++config.outfile)) {
helpf("Remote file name has no length!\n");
- return URG_WRITE_ERROR;
+ return CURLE_WRITE_ERROR;
}
}
else /* fill '#1' ... '#9' terms from URL pattern */
@@ -1100,7 +1112,7 @@ int main(int argc, char *argv[])
outs.stream=(FILE *) fopen(config.outfile, config.resume_from?"ab":"wb");
if (!outs.stream) {
helpf("Can't open '%s'!\n", config.outfile);
- return URG_WRITE_ERROR;
+ return CURLE_WRITE_ERROR;
}
}
else {
@@ -1127,7 +1139,7 @@ int main(int argc, char *argv[])
urlbuffer=(char *)malloc(strlen(url) + strlen(config.infile) + 3);
if(!urlbuffer) {
helpf("out of memory\n");
- return URG_OUT_OF_MEMORY;
+ return CURLE_OUT_OF_MEMORY;
}
if(ptr)
/* there is a trailing slash on the URL */
@@ -1142,7 +1154,7 @@ int main(int argc, char *argv[])
infd=(FILE *) fopen(config.infile, "rb");
if (!infd || stat(config.infile, &fileinfo)) {
helpf("Can't open '%s'!\n", config.infile);
- return URG_READ_ERROR;
+ return CURLE_READ_ERROR;
}
infilesize=fileinfo.st_size;
@@ -1189,48 +1201,121 @@ int main(int argc, char *argv[])
main_init();
- res = curl_urlget(URGTAG_FILE, (FILE *)&outs, /* where to store */
- URGTAG_WRITEFUNCTION, my_fwrite, /* what call to write */
- URGTAG_INFILE, infd, /* for uploads */
- URGTAG_INFILESIZE, infilesize, /* size of uploaded file */
- URGTAG_URL, url, /* what to fetch */
- URGTAG_PROXY, config.proxy, /* proxy to use */
- URGTAG_FLAGS, config.conf, /* flags */
- URGTAG_USERPWD, config.userpwd, /* user + passwd */
- URGTAG_PROXYUSERPWD, config.proxyuserpwd, /* Proxy user + passwd */
- URGTAG_RANGE, config.range, /* range of document */
- URGTAG_ERRORBUFFER, errorbuffer,
- URGTAG_TIMEOUT, config.timeout,
- URGTAG_POSTFIELDS, config.postfields,
- URGTAG_REFERER, config.referer,
- URGTAG_USERAGENT, config.useragent,
- URGTAG_FTPPORT, config.ftpport,
- URGTAG_LOW_SPEED_LIMIT, config.low_speed_limit,
- URGTAG_LOW_SPEED_TIME, config.low_speed_time,
- URGTAG_RESUME_FROM, config.use_resume?config.resume_from:0,
- URGTAG_COOKIE, config.cookie,
- URGTAG_HTTPHEADER, config.headers,
- URGTAG_HTTPPOST, config.httppost,
- URGTAG_SSLCERT, config.cert,
- URGTAG_SSLCERTPASSWD, config.cert_passwd,
- URGTAG_CRLF, config.crlf,
- URGTAG_QUOTE, config.quote,
- URGTAG_POSTQUOTE, config.postquote,
- URGTAG_WRITEHEADER, config.headerfile?&heads:NULL,
- URGTAG_COOKIEFILE, config.cookiefile,
- URGTAG_SSLVERSION, config.ssl_version,
- URGTAG_TIMECONDITION, config.timecond,
- URGTAG_TIMEVALUE, config.condtime,
- URGTAG_CUSTOMREQUEST, config.customrequest,
- URGTAG_STDERR, config.errors,
- URGTAG_PROGRESSMODE, config.progressmode,
- URGTAG_WRITEINFO, config.writeout,
- URGTAG_DONE); /* always terminate the list of tags */
+#if 0
+ /* This is code left from the pre-v7 time, left here mainly as a reminder
+ and possibly as a warning! ;-) */
+
+ res = curl_urlget(CURLOPT_FILE, (FILE *)&outs, /* where to store */
+ CURLOPT_WRITEFUNCTION, my_fwrite, /* what call to write */
+ CURLOPT_INFILE, infd, /* for uploads */
+ CURLOPT_INFILESIZE, infilesize, /* size of uploaded file */
+ CURLOPT_URL, url, /* what to fetch */
+ CURLOPT_PROXY, config.proxy, /* proxy to use */
+ CURLOPT_FLAGS, config.conf, /* flags */
+ CURLOPT_USERPWD, config.userpwd, /* user + passwd */
+ CURLOPT_PROXYUSERPWD, config.proxyuserpwd, /* Proxy user + passwd */
+ CURLOPT_RANGE, config.range, /* range of document */
+ CURLOPT_ERRORBUFFER, errorbuffer,
+ CURLOPT_TIMEOUT, config.timeout,
+ CURLOPT_POSTFIELDS, config.postfields,
+ CURLOPT_REFERER, config.referer,
+ CURLOPT_USERAGENT, config.useragent,
+ CURLOPT_FTPPORT, config.ftpport,
+ CURLOPT_LOW_SPEED_LIMIT, config.low_speed_limit,
+ CURLOPT_LOW_SPEED_TIME, config.low_speed_time,
+ CURLOPT_RESUME_FROM, config.use_resume?config.resume_from:0,
+ CURLOPT_COOKIE, config.cookie,
+ CURLOPT_HTTPHEADER, config.headers,
+ CURLOPT_HTTPPOST, config.httppost,
+ CURLOPT_SSLCERT, config.cert,
+ CURLOPT_SSLCERTPASSWD, config.cert_passwd,
+ CURLOPT_CRLF, config.crlf,
+ CURLOPT_QUOTE, config.quote,
+ CURLOPT_POSTQUOTE, config.postquote,
+ CURLOPT_WRITEHEADER, config.headerfile?&heads:NULL,
+ CURLOPT_COOKIEFILE, config.cookiefile,
+ CURLOPT_SSLVERSION, config.ssl_version,
+ CURLOPT_TIMECONDITION, config.timecond,
+ CURLOPT_TIMEVALUE, config.condtime,
+ CURLOPT_CUSTOMREQUEST, config.customrequest,
+ CURLOPT_STDERR, config.errors,
+ CURLOPT_PROGRESSMODE, config.progressmode,
+ CURLOPT_WRITEINFO, config.writeout,
+ CURLOPT_DONE); /* always terminate the list of tags */
+
+#endif
+ /* The new, v7-style easy-interface! */
+ curl = curl_easy_init();
+ if(curl) {
+ curl_easy_setopt(curl, CURLOPT_FILE, (FILE *)&outs); /* where to store */
+ curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, my_fwrite); /* what call to write */
+ curl_easy_setopt(curl, CURLOPT_INFILE, infd); /* for uploads */
+ curl_easy_setopt(curl, CURLOPT_INFILESIZE, infilesize); /* size of uploaded file */
+ curl_easy_setopt(curl, CURLOPT_URL, url); /* what to fetch */
+ curl_easy_setopt(curl, CURLOPT_PROXY, config.proxy); /* proxy to use */
+#if 0
+ curl_easy_setopt(curl, CURLOPT_FLAGS, config.conf); /* flags */
+#else
+ curl_easy_setopt(curl, CURLOPT_VERBOSE, config.conf&CONF_VERBOSE);
+ curl_easy_setopt(curl, CURLOPT_HEADER, config.conf&CONF_HEADER);
+ curl_easy_setopt(curl, CURLOPT_NOPROGRESS, config.conf&CONF_NOPROGRESS);
+ curl_easy_setopt(curl, CURLOPT_NOBODY, config.conf&CONF_NOBODY);
+ curl_easy_setopt(curl, CURLOPT_FAILONERROR, config.conf&CONF_FAILONERROR);
+ curl_easy_setopt(curl, CURLOPT_UPLOAD, config.conf&CONF_UPLOAD);
+ curl_easy_setopt(curl, CURLOPT_POST, config.conf&CONF_POST);
+ curl_easy_setopt(curl, CURLOPT_FTPLISTONLY, config.conf&CONF_FTPLISTONLY);
+ curl_easy_setopt(curl, CURLOPT_FTPAPPEND, config.conf&CONF_FTPAPPEND);
+ curl_easy_setopt(curl, CURLOPT_NETRC, config.conf&CONF_NETRC);
+ curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, config.conf&CONF_FOLLOWLOCATION);
+ curl_easy_setopt(curl, CURLOPT_FTPASCII, config.conf&CONF_FTPASCII);
+
+ curl_easy_setopt(curl, CURLOPT_PUT, config.conf&CONF_PUT);
+ curl_easy_setopt(curl, CURLOPT_MUTE, config.conf&CONF_MUTE);
+#endif
- main_free();
- if((res!=URG_OK) && config.showerror)
- fprintf(config.errors, "curl: (%d) %s\n", res, errorbuffer);
+ curl_easy_setopt(curl, CURLOPT_USERPWD, config.userpwd); /* user + passwd */
+ curl_easy_setopt(curl, CURLOPT_PROXYUSERPWD, config.proxyuserpwd); /* Proxy user + passwd */
+ curl_easy_setopt(curl, CURLOPT_RANGE, config.range); /* range of document */
+ curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errorbuffer);
+ curl_easy_setopt(curl, CURLOPT_TIMEOUT, config.timeout);
+ curl_easy_setopt(curl, CURLOPT_POSTFIELDS, config.postfields);
+ curl_easy_setopt(curl, CURLOPT_REFERER, config.referer);
+ curl_easy_setopt(curl, CURLOPT_USERAGENT, config.useragent);
+ curl_easy_setopt(curl, CURLOPT_FTPPORT, config.ftpport);
+ curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, config.low_speed_limit);
+ curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, config.low_speed_time);
+ curl_easy_setopt(curl, CURLOPT_RESUME_FROM, config.use_resume?config.resume_from:0);
+ curl_easy_setopt(curl, CURLOPT_COOKIE, config.cookie);
+ curl_easy_setopt(curl, CURLOPT_HTTPHEADER, config.headers);
+ curl_easy_setopt(curl, CURLOPT_HTTPPOST, config.httppost);
+ curl_easy_setopt(curl, CURLOPT_SSLCERT, config.cert);
+ curl_easy_setopt(curl, CURLOPT_SSLCERTPASSWD, config.cert_passwd);
+ curl_easy_setopt(curl, CURLOPT_CRLF, config.crlf);
+ curl_easy_setopt(curl, CURLOPT_QUOTE, config.quote);
+ curl_easy_setopt(curl, CURLOPT_POSTQUOTE, config.postquote);
+ curl_easy_setopt(curl, CURLOPT_WRITEHEADER, config.headerfile?&heads:NULL);
+ curl_easy_setopt(curl, CURLOPT_COOKIEFILE, config.cookiefile);
+ curl_easy_setopt(curl, CURLOPT_SSLVERSION, config.ssl_version);
+ curl_easy_setopt(curl, CURLOPT_TIMECONDITION, config.timecond);
+ curl_easy_setopt(curl, CURLOPT_TIMEVALUE, config.condtime);
+ curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, config.customrequest);
+ curl_easy_setopt(curl, CURLOPT_STDERR, config.errors);
+ curl_easy_setopt(curl, CURLOPT_PROGRESSMODE, config.progressmode);
+ curl_easy_setopt(curl, CURLOPT_WRITEINFO, config.writeout);
+
+ res = curl_easy_perform(curl);
+
+ /* always cleanup */
+ curl_easy_cleanup(curl);
+
+ if((res!=CURLE_OK) && config.showerror)
+ fprintf(config.errors, "curl: (%d) %s\n", res, errorbuffer);
+ }
+ else
+ fprintf(config.errors, "curl: failed to init libcurl!\n");
+
+ main_free();
if((config.errors != stderr) &&
(config.errors != stdout))
diff --git a/src/urlglob.c b/src/urlglob.c
index 9f4134077..85acfa94b 100644
--- a/src/urlglob.c
+++ b/src/urlglob.c
@@ -69,18 +69,18 @@ int glob_set(char *pattern, int pos) {
switch (*pattern) {
case '\0': /* URL ended while set was still open */
printf("error: unmatched brace at pos %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
case '{':
case '[': /* no nested expressions at this time */
printf("error: nested braces not supported %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
case ',':
case '}': /* set element completed */
*buf = '\0';
pat->content.Set.elements = realloc(pat->content.Set.elements, (pat->content.Set.size + 1) * sizeof(char*));
if (!pat->content.Set.elements) {
printf("out of memory in set pattern\n");
- exit(URG_OUT_OF_MEMORY);
+ exit(CURLE_OUT_OF_MEMORY);
}
pat->content.Set.elements[pat->content.Set.size] = strdup(glob_buffer);
++pat->content.Set.size;
@@ -95,11 +95,11 @@ int glob_set(char *pattern, int pos) {
break;
case ']': /* illegal closing bracket */
printf("error: illegal pattern at pos %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
case '\\': /* escaped character, skip '\' */
if (*(buf+1) == '\0') { /* but no escaping of '\0'! */
printf("error: illegal pattern at pos %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
}
++pattern;
++pos; /* intentional fallthrough */
@@ -108,7 +108,7 @@ int glob_set(char *pattern, int pos) {
++pos;
}
}
- exit (URG_FAILED_INIT);
+ exit (CURLE_FAILED_INIT);
}
int glob_range(char *pattern, int pos) {
@@ -132,7 +132,7 @@ int glob_range(char *pattern, int pos) {
pat->content.CharRange.max_c - pat->content.CharRange.min_c > 'z' - 'a') {
/* the pattern is not well-formed */
printf("error: illegal pattern or range specification after pos %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
}
pat->content.CharRange.ptr_c = pat->content.CharRange.min_c;
/* always check for a literal (may be "") between patterns */
@@ -146,7 +146,7 @@ int glob_range(char *pattern, int pos) {
pat->content.NumRange.min_n >= pat->content.NumRange.max_n) {
/* the pattern is not well-formed */
printf("error: illegal pattern or range specification after pos %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
}
if (*pattern == '0') { /* leading zero specified */
c = pattern;
@@ -161,7 +161,7 @@ int glob_range(char *pattern, int pos) {
glob_word(c, pos + (c - pattern));
}
printf("error: illegal character in range specification at pos %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
}
int glob_word(char *pattern, int pos) {
@@ -174,14 +174,14 @@ int glob_word(char *pattern, int pos) {
while (*pattern != '\0' && *pattern != '{' && *pattern != '[') {
if (*pattern == '}' || *pattern == ']') {
printf("illegal character at position %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
}
if (*pattern == '\\') { /* escape character, skip '\' */
++pattern;
++pos;
if (*pattern == '\0') { /* but no escaping of '\0'! */
printf("illegal character at position %d\n", pos);
- exit (URG_URL_MALFORMAT);
+ exit (CURLE_URL_MALFORMAT);
}
}
*buf++ = *pattern++; /* copy character to literal */
@@ -201,21 +201,21 @@ int glob_word(char *pattern, int pos) {
return glob_range(++pattern, ++pos);/* process range pattern */
}
printf("internal error\n");
- exit (URG_FAILED_INIT);
+ exit (CURLE_FAILED_INIT);
}
int glob_url(URLGlob** glob, char* url, int *urlnum)
{
if (strlen(url)>URL_MAX_LENGTH) {
printf("Illegally sized URL\n");
- return URG_URL_MALFORMAT;
+ return CURLE_URL_MALFORMAT;
}
glob_expand = (URLGlob*)malloc(sizeof(URLGlob));
glob_expand->size = 0;
*urlnum = glob_word(url, 1);
*glob = glob_expand;
- return URG_OK;
+ return CURLE_OK;
}
char *next_url(URLGlob *glob)
@@ -258,7 +258,7 @@ char *next_url(URLGlob *glob)
break;
default:
printf("internal error: invalid pattern type (%d)\n", pat->type);
- exit (URG_FAILED_INIT);
+ exit (CURLE_FAILED_INIT);
}
}
if (carry) /* first pattern ptr has run into overflow, done! */
@@ -287,7 +287,7 @@ char *next_url(URLGlob *glob)
break;
default:
printf("internal error: invalid pattern type (%d)\n", pat->type);
- exit (URG_FAILED_INIT);
+ exit (CURLE_FAILED_INIT);
}
}
}
@@ -305,12 +305,12 @@ char *match_url(char *filename, URLGlob glob) {
if (!isdigit((int)*++filename) ||
*filename == '0') { /* only '#1' ... '#9' allowed */
printf("illegal matching expression\n");
- exit(URG_URL_MALFORMAT);
+ exit(CURLE_URL_MALFORMAT);
}
i = *filename - '1';
if (i + 1 > glob.size / 2) {
printf("match against nonexisting pattern\n");
- exit(URG_URL_MALFORMAT);
+ exit(CURLE_URL_MALFORMAT);
}
pat = glob.pattern[i];
switch (pat.type) {
@@ -327,7 +327,7 @@ char *match_url(char *filename, URLGlob glob) {
break;
default:
printf("internal error: invalid pattern type (%d)\n", pat.type);
- exit (URG_FAILED_INIT);
+ exit (CURLE_FAILED_INIT);
}
++filename;
}
diff --git a/src/version.h b/src/version.h
index 7eb55d704..96c57e396 100644
--- a/src/version.h
+++ b/src/version.h
@@ -1,3 +1,3 @@
#define CURL_NAME "curl"
-#define CURL_VERSION "6.5.2"
+#define CURL_VERSION "7.0beta"
#define CURL_ID CURL_NAME " " CURL_VERSION " (" OS ") "