Diffstat (limited to 'src')
-rw-r--r--   src/hugehelp.c   473
-rw-r--r--   src/version.h      2
2 files changed, 256 insertions, 219 deletions
diff --git a/src/hugehelp.c b/src/hugehelp.c index bdef2e7c5..68c372be2 100644 --- a/src/hugehelp.c +++ b/src/hugehelp.c @@ -80,184 +80,196 @@ puts ( " To store cookies, save the HTTP headers to a file using\n" " -D/--dump-header!\n" "\n" -" -B/--ftp-ascii\n" -" (FTP/LDAP) Use ASCII transfer when getting an FTP file\n" -" or LDAP info. For FTP, this can also be enforced by\n" -" using an URL that ends with \";type=A\".\n" +" -B/--use-ascii\n" +" Use ASCII transfer when getting an FTP file or LDAP\n" +" info. For FTP, this can also be enforced by using an\n" +" URL that ends with \";type=A\". This option causes data\n" +" sent to stdout to be in text mode for win32 systems.\n" "\n" " -c/--continue\n" -" Continue/Resume a previous file transfer. This\n" -" instructs curl to continue appending data on the file\n" -" where it was previously left, possibly because of a\n" -" broken connection to the server. There must be a named\n" -" physical file to append to for this to work. Note:\n" -" Upload resume is depening on a command named SIZE not\n" +" Continue/Resume a previous file transfer. This\n" +" instructs curl to continue appending data on the file\n" +" where it was previously left, possibly because of a\n" +" broken connection to the server. There must be a named\n" +" physical file to append to for this to work. Note:\n" +" Upload resume is depening on a command named SIZE not\n" " always present in all ftp servers! Upload resume is for\n" -" FTP only. HTTP resume is only possible with HTTP/1.1\n" +" FTP only. HTTP resume is only possible with HTTP/1.1\n" " or later servers.\n" "\n" " -C/--continue-at <offset>\n" -" Continue/Resume a previous file transfer at the given\n" -" offset. The given offset is the exact number of bytes\n" -" that will be skipped counted from the beginning of the\n" +" Continue/Resume a previous file transfer at the given\n" +" offset. The given offset is the exact number of bytes\n" +" that will be skipped counted from the beginning of the\n" " source file before it is transfered to the destination.\n" -" If used with uploads, the ftp server command SIZE will\n" -" not be used by curl. Upload resume is for FTP only.\n" -" HTTP resume is only possible with HTTP/1.1 or later\n" +" If used with uploads, the ftp server command SIZE will\n" +" not be used by curl. Upload resume is for FTP only.\n" +" HTTP resume is only possible with HTTP/1.1 or later\n" " servers.\n" "\n" " -d/--data <data>\n" -" (HTTP) Sends the specified data in a POST request to\n" -" the HTTP server. Note that the data is sent exactly as\n" +" (HTTP) Sends the specified data in a POST request to\n" +" the HTTP server. Note that the data is sent exactly as\n" " specified with no extra processing. The data is\n" -" expected to be \"url-encoded\". This will cause curl to\n" -" pass the data to the server using the content-type\n" +" expected to be \"url-encoded\". This will cause curl to\n" +" pass the data to the server using the content-type\n" " application/x-www-form-urlencoded. Compare to -F.\n" "\n" -" If you start the data with the letter @, the rest\n" -" should be a file name to read the data from, or - if\n" -" you want curl to read the data from stdin. The con\n" +" If you start the data with the letter @, the rest\n" +" should be a file name to read the data from, or - if\n" +" you want curl to read the data from stdin. The con\n" " tents of the file must already be url-encoded.\n" "\n" " -D/--dump-header <file>\n" -" (HTTP/FTP) Write the HTTP headers to this file. 
Write\n" +" (HTTP/FTP) Write the HTTP headers to this file. Write\n" " the FTP file info to this file if -I/--head is used.\n" "\n" -" This option is handy to use when you want to store the\n" -" cookies that a HTTP site sends to you. The cookies\n" +" This option is handy to use when you want to store the\n" +" cookies that a HTTP site sends to you. The cookies\n" " could then be read in a second curl invoke by using the\n" " -b/--cookie option!\n" "\n" " -e/--referer <URL>\n" " (HTTP) Sends the \"Referer Page\" information to the HTTP\n" -" server. Some badly done CGIs fail if it's not set. This\n" -" can also be set with the -H/--header flag of course.\n" +" server. This can also be set with the -H/--header flag\n" +" of course. When used with -L/--location you can append\n" +" \";auto\" to the referer URL to make curl automatically\n" +" set the previous URL when it follows a Location:\n" +" header. The \";auto\" string can be used alone, even if\n" +" you don't set an initial referer.\n" "\n" " -E/--cert <certificate[:password]>\n" -" (HTTPS) Tells curl to use the specified certificate\n" -" file when getting a file with HTTPS. The certificate\n" -" must be in PEM format. If the optional password isn't\n" +" (HTTPS) Tells curl to use the specified certificate\n" +" file when getting a file with HTTPS. The certificate\n" +" must be in PEM format. If the optional password isn't\n" " specified, it will be queried for on the terminal. Note\n" -" that this certificate is the private key and the pri\n" +" that this certificate is the private key and the pri\n" " vate certificate concatenated!\n" "\n" " -f/--fail\n" -" (HTTP) Fail silently (no output at all) on server\n" -" errors. This is mostly done like this to better enable\n" -" scripts etc to better deal with failed attempts. In\n" +" (HTTP) Fail silently (no output at all) on server\n" +" errors. This is mostly done like this to better enable\n" +" scripts etc to better deal with failed attempts. In\n" " normal cases when a HTTP server fails to deliver a doc\n" -" ument, it returns a HTML document stating so (which\n" +" ument, it returns a HTML document stating so (which\n" " often also describes why and more). This flag will pre\n" " vent curl from outputting that and fail silently\n" " instead.\n" "\n" " -F/--form <name=content>\n" " (HTTP) This lets curl emulate a filled in form in which\n" -" a user has pressed the submit button. This causes curl\n" +" a user has pressed the submit button. This causes curl\n" " to POST data using the content-type multipart/form-data\n" -" according to RFC1867. This enables uploading of binary\n" -" files etc. To force the 'content' part to be read from\n" -" a file, prefix the file name with an @ sign. Example,\n" -" to send your password file to the server, where 'pass\n" -" word' is the name of the form-field to which\n" -" /etc/passwd will be the input:\n" +" according to RFC1867. This enables uploading of binary\n" +" files etc. To force the 'content' part to be be a file,\n" +" prefix the file name with an @ sign. To just get the\n" +" content part from a file, prefix the file name with the\n" +" letter <. 
The difference between @ and < is then that @\n" +" makes a file get attached in the post as a file upload,\n" +" while the < makes a text field and just get the con\n" +" tents for that text field from a file.\n" +"\n" +" Example, to send your password file to the server,\n" +" where input:\n" "\n" " curl -F password=@/etc/passwd www.mypasswords.com\n" +"\n" " To read the file's content from stdin insted of a file,\n" -" use - where the file name should've been.\n" +" use - where the file name should've been. This goes for\n" +" both @ and < constructs.\n" "\n" " -h/--help\n" " Usage help.\n" "\n" " -H/--header <header>\n" " (HTTP) Extra header to use when getting a web page. You\n" -" may specify any number of extra headers. Note that if\n" -" you should add a custom header that has the same name\n" +" may specify any number of extra headers. Note that if\n" +" you should add a custom header that has the same name\n" " as one of the internal ones curl would use, your exter\n" -" nally set header will be used instead of the internal\n" -" one. This allows you to make even trickier stuff than\n" -" curl would normally do. You should not replace inter\n" -" nally set headers without knowing perfectly well what\n" +" nally set header will be used instead of the internal\n" +" one. This allows you to make even trickier stuff than\n" +" curl would normally do. You should not replace inter\n" +" nally set headers without knowing perfectly well what\n" " you're doing.\n" "\n" " -i/--include\n" " (HTTP) Include the HTTP-header in the output. The HTTP-\n" -" header includes things like server-name, date of the\n" +" header includes things like server-name, date of the\n" " document, HTTP-version and more...\n" "\n" " -I/--head\n" -" (HTTP/FTP) Fetch the HTTP-header only! HTTP-servers\n" +" (HTTP/FTP) Fetch the HTTP-header only! HTTP-servers\n" " feature the command HEAD which this uses to get nothing\n" -" but the header of a document. When used on a FTP file,\n" +" but the header of a document. When used on a FTP file,\n" " curl displays the file size only.\n" "\n" " -K/--config <config file>\n" -" Specify which config file to read curl arguments from.\n" -" The config file is a text file in which command line\n" -" arguments can be written which then will be used as if\n" -" they were written on the actual command line. If the\n" -" first column of a config line is a '#' character, the\n" +" Specify which config file to read curl arguments from.\n" +" The config file is a text file in which command line\n" +" arguments can be written which then will be used as if\n" +" they were written on the actual command line. If the\n" +" first column of a config line is a '#' character, the\n" " rest of the line will be treated as a comment.\n" "\n" -" Specify the filename as '-' to make curl read the file\n" +" Specify the filename as '-' to make curl read the file\n" " from stdin.\n" "\n" " -l/--list-only\n" " (FTP) When listing an FTP directory, this switch forces\n" -" a name-only view. Especially useful if you want to\n" -" machine-parse the contents of an FTP directory since\n" -" the normal directory view doesn't use a standard look\n" +" a name-only view. 
Especially useful if you want to\n" +" machine-parse the contents of an FTP directory since\n" +" the normal directory view doesn't use a standard look\n" " or format.\n" "\n" " -L/--location\n" -" (HTTP/HTTPS) If the server reports that the requested\n" -" page has a different location (indicated with the\n" -" header line Location:) this flag will let curl attempt\n" +" (HTTP/HTTPS) If the server reports that the requested\n" +" page has a different location (indicated with the\n" +" header line Location:) this flag will let curl attempt\n" " to reattempt the get on the new place. If used together\n" " with -i or -I, headers from all requested pages will be\n" +); + puts( " shown.\n" "\n" " -m/--max-time <seconds>\n" " Maximum time in seconds that you allow the whole opera\n" " tion to take. This is useful for preventing your batch\n" -" jobs from hanging for hours due to slow networks or\n" -" links going down. This doesn't work properly in win32\n" +" jobs from hanging for hours due to slow networks or\n" +" links going down. This doesn't work properly in win32\n" " systems.\n" "\n" " -M/--manual\n" " Manual. Display the huge help text.\n" "\n" " -n/--netrc\n" -" Makes curl scan the .netrc file in the user's home\n" -" directory for login name and password. This is typi\n" -" cally used for ftp on unix. If used with http, curl\n" -); - puts( -" will enable user authentication. See netrc(5) for\n" -" details on the file format. Curl will not complain if\n" -" that file hasn't the right permissions (it should not\n" -" be world nor group readable). The environment variable\n" +" Makes curl scan the .netrc file in the user's home\n" +" directory for login name and password. This is typi\n" +" cally used for ftp on unix. If used with http, curl\n" +" will enable user authentication. See netrc(4) for\n" +" details on the file format. Curl will not complain if\n" +" that file hasn't the right permissions (it should not\n" +" be world nor group readable). The environment variable\n" " \"HOME\" is used to find the home directory.\n" "\n" -" A quick and very simple example of how to setup a\n" -" .netrc to allow curl to ftp to the machine\n" +" A quick and very simple example of how to setup a\n" +" .netrc to allow curl to ftp to the machine\n" " host.domain.com with user name\n" "\n" " machine host.domain.com login myself password secret\n" "\n" " -N/--no-buffer\n" -" Disables the buffering of the output stream. In normal\n" +" Disables the buffering of the output stream. In normal\n" " work situations, curl will use a standard buffered out\n" -" put stream that will have the effect that it will out\n" -" put the data in chunks, not necessarily exactly when\n" -" the data arrives. Using this option will disable that\n" +" put stream that will have the effect that it will out\n" +" put the data in chunks, not necessarily exactly when\n" +" the data arrives. Using this option will disable that\n" " buffering.\n" "\n" " -o/--output <file>\n" -" Write output to <file> instead of stdout. If you are\n" +" Write output to <file> instead of stdout. If you are\n" " using {} or [] to fetch multiple documents, you can use\n" -" '#' followed by a number in the <file> specifier. That\n" -" variable will be replaced with the current string for\n" +" '#' followed by a number in the <file> specifier. That\n" +" variable will be replaced with the current string for\n" " the URL being fetched. 
Like in:\n" "\n" " curl http://{one,two}.site.com -o \"file_#1.txt\"\n" @@ -272,14 +284,14 @@ puts ( " the path is cut off.)\n" "\n" " -P/--ftpport <address>\n" -" (FTP) Reverses the initiator/listener roles when con\n" -" necting with ftp. This switch makes Curl use the PORT\n" -" command instead of PASV. In practice, PORT tells the\n" +" (FTP) Reverses the initiator/listener roles when con\n" +" necting with ftp. This switch makes Curl use the PORT\n" +" command instead of PASV. In practice, PORT tells the\n" " server to connect to the client's specified address and\n" -" port, while PASV asks the server for an ip address and\n" +" port, while PASV asks the server for an ip address and\n" " port to connect to. <address> should be one of:\n" "\n" -" interface i.e \"eth0\" to specify which interface's IP\n" +" interface i.e \"eth0\" to specify which interface's IP\n" " address you want to use (Unix only)\n" "\n" " IP address i.e \"192.168.10.1\" to specify exact IP num\n" @@ -287,28 +299,28 @@ puts ( "\n" " host name i.e \"my.host.domain\" to specify machine\n" "\n" -" - (any single-letter string) to make it pick\n" +" - (any single-letter string) to make it pick\n" " the machine's default\n" "\n" " -q If used as the first parameter on the command line, the\n" -" $HOME/.curlrc file will not be read and used as a con\n" +" $HOME/.curlrc file will not be read and used as a con\n" " fig file.\n" "\n" " -Q/--quote <comand>\n" -" (FTP) Send an arbitrary command to the remote FTP\n" -" server, by using the QUOTE command of the server. Not\n" -" all servers support this command, and the set of QUOTE\n" -" commands are server specific! Quote commands are sent\n" -" BEFORE the transfer is taking place. To make commands\n" -" take place after a successful transfer, prefix them\n" +" (FTP) Send an arbitrary command to the remote FTP\n" +" server, by using the QUOTE command of the server. Not\n" +" all servers support this command, and the set of QUOTE\n" +" commands are server specific! Quote commands are sent\n" +" BEFORE the transfer is taking place. To make commands\n" +" take place after a successful transfer, prefix them\n" " with a dash '-'. You may specify any amount of commands\n" -" to be run before and after the transfer. If the server\n" -" returns failure for one of the commands, the entire\n" +" to be run before and after the transfer. If the server\n" +" returns failure for one of the commands, the entire\n" " operation will be aborted.\n" "\n" " -r/--range <range>\n" -" (HTTP/FTP) Retrieve a byte range (i.e a partial docu\n" -" ment) from a HTTP/1.1 or FTP server. Ranges can be\n" +" (HTTP/FTP) Retrieve a byte range (i.e a partial docu\n" +" ment) from a HTTP/1.1 or FTP server. 
Ranges can be\n" " specified in a number of ways.\n" "\n" " 0-499 specifies the first 500 bytes\n" @@ -321,165 +333,167 @@ puts ( " ward\n" "\n" " 0-0,-1 specifies the first and last byte only(*)(H)\n" +"\n" " 500-700,600-799\n" " specifies 300 bytes from offset 500(H)\n" "\n" " 100-199,500-599\n" " specifies two separate 100 bytes ranges(*)(H)\n" "\n" -" (*) = NOTE that this will cause the server to reply with a\n" +" (*) = NOTE that this will cause the server to reply with a\n" " multipart response!\n" "\n" -" You should also be aware that many HTTP/1.1 servers do not\n" +" You should also be aware that many HTTP/1.1 servers do not\n" " have this feature enabled, so that when you attempt to get a\n" " range, you'll instead get the whole document.\n" "\n" -" FTP range downloads only support the simple syntax 'start-\n" -" stop' (optionally with one of the numbers omitted). It\n" +" FTP range downloads only support the simple syntax 'start-\n" +" stop' (optionally with one of the numbers omitted). It\n" " depends on the non-RFC command SIZE.\n" "\n" " -s/--silent\n" -" Silent mode. Don't show progress meter or error mes\n" +" Silent mode. Don't show progress meter or error mes\n" " sages. Makes Curl mute.\n" "\n" " -S/--show-error\n" -" When used with -s it makes curl show error message if\n" +" When used with -s it makes curl show error message if\n" " it fails.\n" "\n" " -t/--upload\n" -" Transfer the stdin data to the specified file. Curl\n" -" will read everything from stdin until EOF and store\n" -" with the supplied name. If this is used on a http(s)\n" +" Transfer the stdin data to the specified file. Curl\n" +" will read everything from stdin until EOF and store\n" +" with the supplied name. If this is used on a http(s)\n" " server, the PUT command will be used.\n" "\n" " -T/--upload-file <file>\n" -" Like -t, but this transfers the specified local file.\n" -" If there is no file part in the specified URL, Curl\n" +" Like -t, but this transfers the specified local file.\n" +" If there is no file part in the specified URL, Curl\n" " will append the local file name. NOTE that you must use\n" -" a trailing / on the last directory to really prove to\n" +" a trailing / on the last directory to really prove to\n" " Curl that there is no file name or curl will think that\n" -" your last directory name is the remote file name to\n" -" use. That will most likely cause the upload operation\n" -" to fail. If this is used on a http(s) server, the PUT\n" +" your last directory name is the remote file name to\n" +" use. That will most likely cause the upload operation\n" +" to fail. If this is used on a http(s) server, the PUT\n" " command will be used.\n" "\n" " -u/--user <user:password>\n" -" Specify user and password to use when fetching. See\n" -" README.curl for detailed examples of how to use this.\n" -" If no password is specified, curl will ask for it\n" +" Specify user and password to use when fetching. See\n" +" README.curl for detailed examples of how to use this.\n" +" If no password is specified, curl will ask for it\n" " interactively.\n" "\n" " -U/--proxy-user <user:password>\n" -" Specify user and password to use for Proxy authentica\n" +" Specify user and password to use for Proxy authentica\n" " tion. If no password is specified, curl will ask for it\n" " interactively.\n" +"\n" " -v/--verbose\n" -" Makes the fetching more verbose/talkative. Mostly\n" -" usable for debugging. Lines starting with '>' means\n" +" Makes the fetching more verbose/talkative. 
Mostly\n" +" usable for debugging. Lines starting with '>' means\n" " data sent by curl, '<' means data received by curl that\n" -" is hidden in normal cases and lines starting with '*'\n" +" is hidden in normal cases and lines starting with '*'\n" " means additional info provided by curl.\n" "\n" " -V/--version\n" -" Displays the full version of curl, libcurl and other\n" +" Displays the full version of curl, libcurl and other\n" " 3rd party libraries linked with the executable.\n" "\n" " -w/--write-out <format>\n" -" Defines what to display after a completed and success\n" -" ful operation. The format is a string that may contain\n" -" plain text mixed with any number of variables. The\n" +" Defines what to display after a completed and success\n" +" ful operation. The format is a string that may contain\n" +" plain text mixed with any number of variables. The\n" " string can be specified as \"string\", to get read from a\n" -" particular file you specify it \"@filename\" and to tell\n" +" particular file you specify it \"@filename\" and to tell\n" " curl to read the format from stdin you write \"@-\".\n" "\n" " The variables present in the output format will be sub\n" -" stituted by the value or text that curl thinks fit, as\n" +" stituted by the value or text that curl thinks fit, as\n" " described below. All variables are specified like\n" -" %{variable_name} and to output a normal % you just\n" -" write them like %%. You can output a newline by using\n" +" %{variable_name} and to output a normal % you just\n" +" write them like %%. You can output a newline by using\n" " \\n, a carrige return with \\r and a tab space with \\t.\n" "\n" -" NOTE: The %-letter is a special letter in the\n" -" win32-environment, where all occurrences of % must be\n" +" NOTE: The %-letter is a special letter in the\n" +" win32-environment, where all occurrences of % must be\n" " doubled when using this option.\n" "\n" " Available variables are at this point:\n" "\n" -" url_effective The URL that was fetched last. This is\n" +" url_effective The URL that was fetched last. This is\n" " mostly meaningful if you've told curl to\n" " follow location: headers.\n" "\n" " http_code The numerical code that was found in the\n" " last retrieved HTTP(S) page.\n" -"\n" -" time_total The total time, in seconds, that the\n" -" full operation lasted. The time will be\n" +" time_total The total time, in seconds, that the\n" +" full operation lasted. The time will be\n" " displayed with millisecond resolution.\n" "\n" " time_namelookup\n" -" The time, in seconds, it took from the\n" -" start until the name resolving was com\n" +" The time, in seconds, it took from the\n" +" start until the name resolving was com\n" " pleted.\n" "\n" -" time_connect The time, in seconds, it took from the\n" -" start until the connect to the remote\n" +" time_connect The time, in seconds, it took from the\n" +" start until the connect to the remote\n" " host (or proxy) was completed.\n" +"\n" " time_pretransfer\n" -" The time, in seconds, it took from the\n" -" start until the file transfer is just\n" -" about to begin. This includes all pre-\n" -" transfer commands and negotiations that\n" -" are specific to the particular proto\n" +" The time, in seconds, it took from the\n" +" start until the file transfer is just\n" +" about to begin. 
This includes all pre-\n" +" transfer commands and negotiations that\n" +" are specific to the particular proto\n" " col(s) involved.\n" "\n" -" size_download The total amount of bytes that were\n" +" size_download The total amount of bytes that were\n" " downloaded.\n" "\n" -" size_upload The total amount of bytes that were\n" +" size_upload The total amount of bytes that were\n" " uploaded.\n" "\n" -" speed_download The average download speed that curl\n" +" speed_download The average download speed that curl\n" " measured for the complete download.\n" "\n" -" speed_upload The average upload speed that curl mea\n" +" speed_upload The average upload speed that curl mea\n" " sured for the complete download.\n" "\n" " -x/--proxy <proxyhost[:port]>\n" -" Use specified proxy. If the port number is not speci\n" +" Use specified proxy. If the port number is not speci\n" " fied, it is assumed at port 1080.\n" "\n" " -X/--request <command>\n" -" (HTTP) Specifies a custom request to use when communi\n" -" cating with the HTTP server. The specified request\n" +" (HTTP) Specifies a custom request to use when communi\n" +" cating with the HTTP server. The specified request\n" " will be used instead of the standard GET. Read the HTTP\n" " 1.1 specification for details and explanations.\n" "\n" -" (FTP) Specifies a custom FTP command to use instead of\n" +" (FTP) Specifies a custom FTP command to use instead of\n" +); + puts( " LIST when doing file lists with ftp.\n" "\n" " -y/--speed-time <time>\n" " If a download is slower than speed-limit bytes per sec\n" -" ond during a speed-time period, the download gets\n" +" ond during a speed-time period, the download gets\n" " aborted. If speed-time is used, the default speed-limit\n" " will be 1 unless set with -y.\n" -"\n" " -Y/--speed-limit <speed>\n" " If a download is slower than this given speed, in bytes\n" -" per second, for speed-time seconds it gets aborted.\n" +" per second, for speed-time seconds it gets aborted.\n" " speed-time is set with -Y and is 30 if not set.\n" "\n" " -z/--time-cond <date expression>\n" -" (HTTP) Request to get a file that has been modified\n" -); - puts( -" later than the given time and date, or one that has\n" +" (HTTP) Request to get a file that has been modified\n" +" later than the given time and date, or one that has\n" " been modified before that time. The date expression can\n" " be all sorts of date strings or if it doesn't match any\n" -" internal ones, it tries to get the time from a given\n" -" file name instead! See the GNU date(1) man page for\n" +" internal ones, it tries to get the time from a given\n" +" file name instead! See the GNU date(1) man page for\n" " date expression details.\n" -" Start the date expression with a dash (-) to make it\n" -" request for a document that is older than the given\n" +"\n" +" Start the date expression with a dash (-) to make it\n" +" request for a document that is older than the given\n" " date/time, default is a document that is newer than the\n" " specified date/time.\n" "\n" @@ -492,15 +506,15 @@ puts ( " ing with a remote SSL server.\n" "\n" " -#/--progress-bar\n" -" Make curl display progress information as a progress\n" +" Make curl display progress information as a progress\n" " bar instead of the default statistics.\n" "\n" " --crlf\n" -" (FTP) Convert LF to CRLF in upload. Useful for MVS\n" +" (FTP) Convert LF to CRLF in upload. 
Useful for MVS\n" " (OS/390).\n" "\n" " --stderr <file>\n" -" Redirect all writes to stderr to the specified file\n" +" Redirect all writes to stderr to the specified file\n" " instead. If the file name is a plain '-', it is instead\n" " written to stdout. This option has no point when you're\n" " using a shell with decent redirecting capabilities.\n" @@ -515,7 +529,6 @@ puts ( "\n" " HTTPS_PROXY [protocol://]<host>[:port]\n" " Sets proxy server to use for HTTPS.\n" -"\n" " FTP_PROXY [protocol://]<host>[:port]\n" " Sets proxy server to use for FTP.\n" "\n" @@ -523,19 +536,20 @@ puts ( " Sets proxy server to use for GOPHER.\n" "\n" " ALL_PROXY [protocol://]<host>[:port]\n" -" Sets proxy server to use if no protocol-specific proxy\n" +" Sets proxy server to use if no protocol-specific proxy\n" " is set.\n" "\n" " NO_PROXY <comma-separated list of hosts>\n" " list of host names that shouldn't go through any proxy.\n" " If set to a asterisk '*' only, it matches all hosts.\n" +"\n" " COLUMNS <integer>\n" -" The width of the terminal. This variable only affects\n" +" The width of the terminal. This variable only affects\n" " curl when the --progress-bar option is used.\n" "\n" "EXIT CODES\n" " There exists a bunch of different error codes and their cor\n" -" responding error messages that may appear during bad condi\n" +" responding error messages that may appear during bad condi\n" " tions. At the time of this writing, the exit codes are:\n" "\n" " 1 Unsupported protocol. This build of curl has no support\n" @@ -545,42 +559,42 @@ puts ( "\n" " 3 URL malformat. The syntax was not correct.\n" "\n" -" 4 URL user malformatted. The user-part of the URL syntax\n" +" 4 URL user malformatted. The user-part of the URL syntax\n" " was not correct.\n" "\n" -" 5 Couldn't resolve proxy. The given proxy host could not\n" +" 5 Couldn't resolve proxy. The given proxy host could not\n" " be resolved.\n" "\n" -" 6 Couldn't resolve host. The given remote host was not\n" +" 6 Couldn't resolve host. The given remote host was not\n" " resolved.\n" "\n" " 7 Failed to connect to host.\n" "\n" -" 8 FTP weird server reply. The server sent data curl\n" +" 8 FTP weird server reply. The server sent data curl\n" " couldn't parse.\n" "\n" " 9 FTP access denied. The server denied login.\n" "\n" -" 10 FTP user/password incorrect. Either one or both were\n" +" 10 FTP user/password incorrect. Either one or both were\n" " not accepted by the server.\n" "\n" -" 11 FTP weird PASS reply. Curl couldn't parse the reply\n" +" 11 FTP weird PASS reply. Curl couldn't parse the reply\n" " sent to the PASS request.\n" -"\n" -" 12 FTP weird USER reply. Curl couldn't parse the reply\n" +" 12 FTP weird USER reply. Curl couldn't parse the reply\n" " sent to the USER request.\n" "\n" -" 13 FTP weird PASV reply, Curl couldn't parse the reply\n" +" 13 FTP weird PASV reply, Curl couldn't parse the reply\n" " sent to the PASV request.\n" "\n" -" 14 FTP weird 227 formay. Curl couldn't parse the 227-line\n" +" 14 FTP weird 227 formay. Curl couldn't parse the 227-line\n" " the server sent.\n" "\n" " 15 FTP can't get host. Couldn't resolve the host IP we got\n" " in the 227-line.\n" "\n" -" 16 FTP can't reconnect. Couldn't connect to the host we\n" +" 16 FTP can't reconnect. Couldn't connect to the host we\n" " got in the 227-line.\n" +"\n" " 17 FTP couldn't set binary. Couldn't change transfer\n" " method to binary.\n" "\n" @@ -588,51 +602,51 @@ puts ( "\n" " 19 FTP couldn't RETR file. The RETR command failed.\n" "\n" -" 20 FTP write error. 
The transfer was reported bad by the\n" +" 20 FTP write error. The transfer was reported bad by the\n" " server.\n" "\n" -" 21 FTP quote error. A quote command returned error from\n" +" 21 FTP quote error. A quote command returned error from\n" " the server.\n" "\n" -" 22 HTTP not found. The requested page was not found. This\n" +" 22 HTTP not found. The requested page was not found. This\n" " return code only appears if --fail is used.\n" "\n" -" 23 Write error. Curl couldn't write data to a local\n" +" 23 Write error. Curl couldn't write data to a local\n" " filesystem or similar.\n" "\n" " 24 Malformat user. User name badly specified.\n" "\n" -" 25 FTP couldn't STOR file. The server denied the STOR\n" +" 25 FTP couldn't STOR file. The server denied the STOR\n" " operation.\n" "\n" " 26 Read error. Various reading problems.\n" "\n" " 27 Out of memory. A memory allocation request failed.\n" "\n" -" 28 Operation timeout. The specified time-out period was\n" +" 28 Operation timeout. The specified time-out period was\n" " reached according to the conditions.\n" "\n" -" 29 FTP couldn't set ASCII. The server returned an unknown\n" +" 29 FTP couldn't set ASCII. The server returned an unknown\n" " reply.\n" "\n" " 30 FTP PORT failed. The PORT command failed.\n" "\n" " 31 FTP couldn't use REST. The REST command failed.\n" -"\n" -" 32 FTP couldn't use SIZE. The SIZE command failed. The\n" -" command is an extension to the original FTP spec RFC\n" +" 32 FTP couldn't use SIZE. The SIZE command failed. The\n" +" command is an extension to the original FTP spec RFC\n" " 959.\n" "\n" " 33 HTTP range error. The range \"command\" didn't work.\n" "\n" -" 34 HTTP post error. Internal post-request generation\n" +" 34 HTTP post error. Internal post-request generation\n" " error.\n" "\n" " 35 SSL connect error. The SSL handshaking failed.\n" "\n" -" 36 FTP bad download resume. Couldn't continue an earlier\n" +" 36 FTP bad download resume. Couldn't continue an earlier\n" " aborted download.\n" -" 37 FILE couldn't read file. Failed to open the file. Per\n" +"\n" +" 37 FILE couldn't read file. Failed to open the file. Per\n" " missions?\n" "\n" " 38 LDAP cannot bind. LDAP bind operation failed.\n" @@ -641,27 +655,27 @@ puts ( "\n" " 40 Library not found. The LDAP library was not found.\n" "\n" -" 41 Function not found. A required LDAP function was not\n" +" 41 Function not found. A required LDAP function was not\n" " found.\n" "\n" -" XX There will appear more error codes here in future\n" -" releases. The existing ones are meant to never change.\n" +" XX There will appear more error codes here in future\n" +" releases. 
The existing ones are meant to never change.\n" "\n" "BUGS\n" -" If you do find any (or have other suggestions), mail Daniel\n" -" Stenberg <Daniel.Stenberg@haxx.nu>.\n" +" If you do find any (or have other suggestions), mail Daniel\n" +" Stenberg <Daniel.Stenberg@haxx.se>.\n" "\n" "AUTHORS / CONTRIBUTORS\n" -" - Daniel Stenberg <Daniel.Stenberg@haxx.nu>\n" +" - Daniel Stenberg <Daniel.Stenberg@haxx.se>\n" " - Rafael Sagula <sagula@inf.ufrgs.br>\n" " - Sampo Kellomaki <sampo@iki.fi>\n" " - Linas Vepstas <linas@linas.org>\n" " - Bjorn Reese <breese@mail1.stofanet.dk>\n" " - Johan Anderson <johan@homemail.com>\n" -" - Kjell Ericson <Kjell.Ericson@haxx,nu>\n" +" - Kjell Ericson <Kjell.Ericson@haxx.se>\n" " - Troy Engel <tengel@sonic.net>\n" " - Ryan Nelson <ryan@inch.com>\n" -" - Bjorn Stenberg <Bjorn.Stenberg@haxx.nu>\n" +" - Björn Stenberg <Bjorn.Stenberg@haxx.se>\n" " - Angus Mackay <amackay@gus.ml.org>\n" " - Eric Young <eay@cryptsoft.com>\n" " - Simon Dick <simond@totally.irrelevant.org>\n" @@ -679,9 +693,9 @@ puts ( " - Ralph Beckmann <rabe@uni-paderborn.de>\n" " - T. Yamada <tai@imasy.or.jp>\n" " - Lars J. Aas <larsa@sim.no>\n" -" - Jörn Hartroth <Joern.Hartroth@telekom.de>\n" +" - Jörn Hartroth <Joern.Hartroth@computer.org>\n" " - Matthew Clarke <clamat@van.maves.ca>\n" -" - Linus Nielsen <Linus.Nielsen@haxx.nu>\n" +" - Linus Nielsen <Linus.Nielsen@haxx.se>\n" " - Felix von Leitner <felix@convergence.de>\n" " - Dan Zitter <dzitter@zitter.net>\n" " - Jongki Suwandi <Jongki.Suwandi@eng.sun.com>\n" @@ -695,9 +709,12 @@ puts ( " - Paul Marquis <pmarquis@iname.com>\n" " - David LeBlanc <dleblanc@qnx.com>\n" " - Rich Gray at Plus Technologies\n" +" - Luong Dinh Dung <u8luong@lhsystems.hu>\n" +" - Torsten Foertsch <torsten.foertsch@gmx.net>\n" +" - Kristian Köhntopp <kris@koehntopp.de>\n" "\n" "WWW\n" -" http://curl.haxx.nu\n" +" http://curl.haxx.se\n" "\n" "FTP\n" " ftp://ftp.sunet.se/pub/www/utilities/curl/\n" @@ -710,7 +727,7 @@ puts ( " You always find news about what's going on as well as the latest versions\n" " from the curl web pages, located at:\n" "\n" -" http://curl.haxx.nu\n" +" http://curl.haxx.se\n" "\n" "SIMPLE USAGE\n" "\n" @@ -769,6 +786,8 @@ puts ( " pick a file like:\n" "\n" " curl http://name:passwd@machine.domain/full/path/to/file\n" +); + puts( "\n" " or specify user and password separately like in\n" "\n" @@ -798,8 +817,6 @@ puts ( " curl -u user:passwd -x my-proxy:888 http://www.get.this/\n" "\n" " Some proxies require special authentication. Specify by using -U as above:\n" -); - puts( "\n" " curl -U user:passwd -x my-proxy:888 http://www.get.this/\n" "\n" @@ -887,7 +904,7 @@ puts ( "\n" " Store the HTTP headers in a separate file:\n" "\n" -" curl --dump-header headers.txt curl.haxx.nu\n" +" curl --dump-header headers.txt curl.haxx.se\n" "\n" " Note that headers stored in a separate file can be very useful at a later\n" " time if you want curl to use cookies sent by the server. More about that in\n" @@ -1049,6 +1066,8 @@ puts ( "\n" " Note that by specifying -b you enable the \"cookie awareness\" and with -L\n" " you can make curl follow a location: (which often is used in combination\n" +); + puts( " with cookies). 
So that if a site sends cookies and a location, you can\n" " use a non-existing file to trig the cookie awareness like:\n" "\n" @@ -1069,8 +1088,6 @@ puts ( "\n" " From left-to-right:\n" " % - percentage completed of the whole transfer\n" -); - puts( " Total - total size of the whole expected transfer\n" " % - percentage completed of the download\n" " Received - currently downloaded amount of bytes\n" @@ -1245,6 +1262,26 @@ puts ( "\n" " Otherwise, curl will first attempt to use v3 and then v2.\n" "\n" +" To use OpenSSL to convert your favourite browser's certificate into a PEM\n" +" formatted one that curl can use, do something like this (assuming netscape,\n" +" but IE is likely to work similarly):\n" +"\n" +" You start with hitting the 'security' menu button in netscape. \n" +"\n" +" Select 'certificates->yours' and then pick a certificate in the list \n" +"\n" +" Press the 'export' button \n" +"\n" +" enter your PIN code for the certs \n" +"\n" +" select a proper place to save it \n" +"\n" +" Run the 'openssl' application to convert the certificate. If you cd to the\n" +" openssl installation, you can do it like:\n" +"\n" +" # ./apps/openssl pkcs12 -certfile [file you saved] -out [PEMfile]\n" +"\n" +"\n" "RESUMING FILE TRANSFERS\n" "\n" " To continue a file transfer where it was previously aborted, curl supports\n" @@ -1302,6 +1339,8 @@ puts ( "\n" " Aliases for 'm' are 'match' and 'find', and aliases for 'd' are 'define'\n" " and 'lookup'. For example,\n" +); + puts( "\n" " curl dict://dict.org/find:curl\n" "\n" @@ -1351,8 +1390,6 @@ puts ( "\n" "\n" " The usage of the -x/--proxy flag overrides the environment variables.\n" -); - puts( "\n" "NETRC\n" "\n" @@ -1369,7 +1406,7 @@ puts ( "\n" " A very simple .netrc file could look something like:\n" "\n" -" machine curl.haxx.nu login iamdaniel password mysecret\n" +" machine curl.haxx.se login iamdaniel password mysecret\n" "\n" "CUSTOM OUTPUT\n" "\n" diff --git a/src/version.h b/src/version.h index 56085c9e4..af29e6706 100644 --- a/src/version.h +++ b/src/version.h @@ -1,3 +1,3 @@ #define CURL_NAME "curl" -#define CURL_VERSION "7.0.1beta" +#define CURL_VERSION "7.0.11test" #define CURL_ID CURL_NAME " " CURL_VERSION " (" OS ") " |
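
Most of the hugehelp.c hunks above only re-flow the embedded manual text and move a few of the "); puts(" boundaries that split it across calls. As a rough illustration of the pattern that file follows, here is a minimal C sketch; the hugehelp() name and the motivation (keeping each string literal small enough for picky compilers) are assumptions on my part, not something this diff states.

  #include <stdio.h>

  /*
   * Minimal sketch, not the generated file itself: the rendered manual
   * is stored as adjacent C string literals handed to successive puts()
   * calls, and the chunk boundaries shift whenever the text changes
   * length, which is what the "); puts(" moves in the patch show.
   */
  void hugehelp(void)
  {
    puts(
      "  -d/--data <data>\n"
      "       (HTTP) Sends the specified data in a POST request to\n"
      "       the HTTP server.\n");
    puts(
      "  -o/--output <file>\n"
      "       Write output to <file> instead of stdout.\n");
  }

  int main(void)
  {
    /* prints the excerpted entries, the way curl -M dumps the full manual */
    hugehelp();
    return 0;
  }

Compiling and running the sketch simply prints the two excerpted option entries; in curl itself the same mechanism is what backs the -M/--manual option described in the text above.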