author     Dan Fandrich <dan@coneharvesters.com>  2020-05-09 23:56:42 +0200
committer  Dan Fandrich <dan@coneharvesters.com>  2020-05-10 00:36:43 +0200
commit     05d2a1702f80bc7c1d9fab9f3e50645f555e64b5 (patch)
tree       97943b430908884bd46fe0da24e58dd02ebfe245
parent     54fa68bf4468a07cb9a1cc672052bdd6df840512 (diff)
download   curl-dfandrich/help.tar.gz
Add a demo of what an enhanced help system for curl might look like (dfandrich/help)
This script demonstrates what help output might look like if it were broken
down into multiple categories, so that only a few related options are
displayed at a time rather than all 231 at once. The curlh script works just
like curl but has different help options. Run "curlh --help-demo" to see
which options are available and "curlh --help" to see which ones might be
available in the full version. Only a few of the possible categories are
implemented here, but it should be enough to get a flavour of what a full
implementation might be like.
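A rough sketch of how the demo might be driven once the script is in place
(the path is where the diff below adds it; the exact output is whatever the
script prints):

    ./docs/examples/curlh --help-demo                    # describe this demonstration
    ./docs/examples/curlh --help                         # basic options plus the category list
    ./docs/examples/curlh --help-clientauth --help-ftps  # intersection of two categories
    ./docs/examples/curlh -v --help-search sni           # verbose search for a keyword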
-rwxr-xr-x  docs/examples/curlh  2626
1 file changed, 2626 insertions, 0 deletions
diff --git a/docs/examples/curlh b/docs/examples/curlh
new file mode 100755
index 000000000..28b9cce80
--- /dev/null
+++ b/docs/examples/curlh
@@ -0,0 +1,2626 @@
+#!/bin/bash
+# Test program to demonstrate what a more powerful set of curl --help options
+# might look like.
+# Based on curl 7.70.0
+# Dan Fandrich
+# May 2020
+
+if [[ -z "$1" ]]; then
+ echo "curl: try 'curlh --help' or 'curlh --manual' for more information"
+ echo "or 'curlh --help-demo' for information about this demonstration script"
+ exit 2
+fi
+
+ALL_OPTS=("$@")
+HELP_TYPE=
+VERBOSE=
+while [[ -n "$1" ]]; do
+ case "$1" in
+ --help | -h)
+ HELP_TYPE=basic
+ ;;
+ --help-clientauth)
+ # Special case both --help-clientauth and --help-ftps being used at the same time
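+      # For illustration: "curlh --help-clientauth --help-ftps" (in either
+      # order) ends up with HELP_TYPE=clientauth+ftps in this demo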
+ if [[ "$HELP_TYPE" == "ftps" ]]; then
+ HELP_TYPE=clientauth+ftps
+ else
+ HELP_TYPE=clientauth
+ fi
+ ;;
+ --no-help-https)
+ if [[ -n "$HELP_TYPE" ]]; then
+ HELP_TYPE="$HELP_TYPE"-https
+ else
+ HELP_TYPE=-https
+ fi
+ ;;
+ --help-ftps)
+ if [[ "$HELP_TYPE" == "clientauth" ]]; then
+ HELP_TYPE=clientauth+ftps
+ elif [[ "$HELP_TYPE" == "-https" ]]; then
+ HELP_TYPE=ftps-https
+ else
+ HELP_TYPE=ftps
+ fi
+ ;;
+ --help-search)
+ case "$2" in
+ sni)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+These matching options were found for "sni":
+
+--connect-to <HOST1:PORT1:HOST2:PORT2>
+
+ For a request to the given HOST1:PORT1 pair, connect to
+ HOST2:PORT2 instead. This option is suitable to direct requests
+ at a specific server, e.g. at a specific cluster node in a clus-
+ ter of servers. This option is only used to establish the net-
+ work connection. It does NOT affect the hostname/port that is
+ used for TLS/SSL (e.g. SNI, certificate verification) or for the
+ application protocols. "HOST1" and "PORT1" may be the empty
+ string, meaning "any host/port". "HOST2" and "PORT2" may also be
+ the empty string, meaning "use the request's original
+ host/port".
+
+ A "host" specified to this option is compared as a string, so it
+ needs to match the name used in request URL. It can be either
+ numerical such as "127.0.0.1" or the full host name such as "ex-
+ ample.org".
+
+ This option can be used many times to add many connect rules.
+
+ See also --resolve and -H, --header. Added in 7.49.0.
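+
+                   For illustration (the host names here are placeholders):
+
+                     curl --connect-to example.com:443:backend.example.com:8443 https://example.com/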
+EOF
+ else
+ cat <<EOF
+These matching options were found for "sni":
+
+ --connect-to <HOST1:PORT1:HOST2:PORT2> Connect to host
+EOF
+ fi
+ exit 0;
+ ;;
+
+ epsv)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+These matching options were found for "epsv":
+
+ --disable-epsv Inhibit using EPSV
+ --ftp-pasv Use PASV/EPSV instead of PORT
+ --ftp-pret Send PRET before PASV
+ --ftp-skip-pasv-ip Skip the IP address for PASV
+EOF
+ fi
+ exit 0;
+ ;;
+ *)
+ echo "Error: you're confusing my brain with this search!"
+ echo "I only know about sni and epsv."
+ exit 1
+ ;;
+ esac
+ ;;
+
+ --help-*)
+ HELP_TYPE="${1#--help-}"
+ ;;
+ --verbose | -v)
+ VERBOSE=1
+ ;;
+ esac
+ shift
+done
+
+# Restore options in case we need to call curl
+set -- "${ALL_OPTS[@]}"
+
+if [[ -n "$HELP_TYPE" ]]; then
+ case "$HELP_TYPE" in
+ basic)
+ if [[ -n "$VERBOSE" ]]; then
+      cat <<'EOF'
+Basic options:
+
+--anyauth
+ (HTTP) Tells curl to figure out authentication method by itself, and
+ use the most secure one the remote site claims to support. This is
+ done by first doing a request and checking the response-headers, thus
+ possibly inducing an extra network round-trip. This is used instead of
+ setting a specific authentication method, which you can do with --ba‐
+ sic, --digest, --ntlm, and --negotiate.
+
+ Using --anyauth is not recommended if you do uploads from stdin, since
+ it may require data to be sent twice and then the client must be able
+ to rewind. If the need should arise when uploading from stdin, the up‐
+ load operation will fail.
+
+ Used together with -u, --user.
+
+ See also --proxy-anyauth and --basic and --digest.
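+
+       A minimal illustration (user name, password, and URL are placeholders):
+
+         curl --anyauth -u name:password https://example.com/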
+
+-a, --append
+ (FTP SFTP) When used in an upload, this makes curl append to the tar‐
+ get file instead of overwriting it. If the remote file doesn't exist,
+ it will be created. Note that this flag is ignored by some SFTP
+ servers (including OpenSSH).
+
+--basic
+ (HTTP) Tells curl to use HTTP Basic authentication with the remote
+ host. This is the default and this option is usually pointless, unless
+ you use it to override a previously set option that sets a different
+ authentication method (such as --ntlm, --digest, or --negotiate).
+
+ Used together with -u, --user.
+
+ See also --proxy-basic.
+
+-K, --config <file>
+
+ Specify a text file to read curl arguments from. The command line ar‐
+ guments found in the text file will be used as if they were provided
+ on the command line.
+
+ Options and their parameters must be specified on the same line in the
+ file, separated by whitespace, colon, or the equals sign. Long option
+ names can optionally be given in the config file without the initial
+ double dashes and if so, the colon or equals characters can be used as
+ separators. If the option is specified with one or two dashes, there
+ can be no colon or equals character between the option and its parame‐
+ ter.
+
+ If the parameter contains whitespace (or starts with : or =), the pa‐
+ rameter must be enclosed within quotes. Within double quotes, the fol‐
+ lowing escape sequences are available: \\, \", \t, \n, \r and \v. A
+ backslash preceding any other letter is ignored. If the first column
+ of a config line is a '#' character, the rest of the line will be
+ treated as a comment. Only write one option per physical line in the
+ config file.
+
+ Specify the filename to -K, --config as '-' to make curl read the file
+ from stdin.
+
+ Note that to be able to specify a URL in the config file, you need to
+ specify it using the --url option, and not by simply writing the URL
+ on its own line. So, it could look similar to this:
+
+ url = "https://curl.haxx.se/docs/"
+
+ When curl is invoked, it (unless -q, --disable is used) checks for a
+ default config file and uses it if found. The default config file is
+ checked for in the following places in this order:
+
+ 1) curl tries to find the "home dir": It first checks for the
+ CURL_HOME and then the HOME environment variables. Failing that, it
+ uses getpwuid() on Unix-like systems (which returns the home dir given
+ the current user in your system). On Windows, it then checks for the
+ APPDATA variable, or as a last resort the '%USERPROFILE%\Application
+ Data'.
+
+       2) On Windows, if there is no .curlrc file in the home dir, it checks
+ for one in the same dir the curl executable is placed. On Unix-like
+ systems, it will simply try to load .curlrc from the determined home
+ dir.
+
+ # --- Example file ---
+ # this is a comment
+ url = "example.com"
+ output = "curlhere.html"
+ user-agent = "superagent/1.0"
+
+ # and fetch another URL too
+ url = "example.com/docs/manpage.html"
+ -O
+ referer = "http://nowhereatall.example.com/"
+ # --- End of example file ---
+
+ This option can be used multiple times to load multiple config files.
+
+--connect-timeout <seconds>
+ Maximum time in seconds that you allow curl's connection to take.
+ This only limits the connection phase, so if curl connects within the
+ given period it will continue - if not it will exit. Since version
+ 7.32.0, this option accepts decimal values.
+
+ If this option is used several times, the last one will be used.
+
+ See also -m, --max-time.
+
+-c, --cookie-jar <filename>
+ (HTTP) Specify to which file you want curl to write all cookies after
+ a completed operation. Curl writes all cookies from its in-memory
+ cookie storage to the given file at the end of operations. If no cook‐
+ ies are known, no data will be written. The file will be written using
+ the Netscape cookie file format. If you set the file name to a single
+ dash, "-", the cookies will be written to stdout.
+
+ This command line option will activate the cookie engine that makes
+ curl record and use cookies. Another way to activate it is to use the
+ -b, --cookie option.
+
+ If the cookie jar can't be created or written to, the whole curl oper‐
+ ation won't fail or even report an error clearly. Using -v, --verbose
+ will get a warning displayed, but that is the only visible feedback
+ you get about this possibly lethal situation.
+
+ If this option is used several times, the last specified file name
+ will be used.
+
+-b, --cookie <data|filename>
+ (HTTP) Pass the data to the HTTP server in the Cookie header. It is
+ supposedly the data previously received from the server in a "Set-
+ Cookie:" line. The data should be in the format "NAME1=VALUE1;
+ NAME2=VALUE2".
+
+       If no '=' symbol is used in the argument, it is instead treated as a
+       filename to read previously stored cookies from. This option also
+       activates the cookie engine which will make curl record incoming
+       cookies, which may be handy if you're using this in combination with
+       the -L, --location option or do multiple URL transfers on the same
+       invocation. If the file name is exactly a minus ("-"), curl will
+       instead read the contents from stdin.
+
+ The file format of the file to read cookies from should be plain HTTP
+ headers (Set-Cookie style) or the Netscape/Mozilla cookie file format.
+
+ The file specified with -b, --cookie is only used as input. No cookies
+ will be written to the file. To store cookies, use the -c, --cookie-
+ jar option.
+
+ Exercise caution if you are using this option and multiple transfers
+ may occur. If you use the NAME1=VALUE1; format, or in a file use the
+ Set-Cookie format and don't specify a domain, then the cookie is sent
+ for any domain (even after redirects are followed) and cannot be modi‐
+ fied by a server-set cookie. If the cookie engine is enabled and a
+ server sets a cookie of the same name then both will be sent on a fu‐
+ ture transfer to that server, likely not what you intended. To ad‐
+ dress these issues set a domain in Set-Cookie (doing that will include
+ sub domains) or use the Netscape format.
+
+ If this option is used several times, the last one will be used.
+
+ Users very often want to both read cookies from a file and write up‐
+ dated cookies back to a file, so using both -b, --cookie and -c,
+ --cookie-jar in the same command line is common.
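+
+       An illustration of that common pattern (the cookie file name and URL
+       are placeholders):
+
+         curl -b cookies.txt -c cookies.txt https://example.com/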
+
+-d, --data <data>
+ (HTTP) Sends the specified data in a POST request to the HTTP server,
+ in the same way that a browser does when a user has filled in an HTML
+ form and presses the submit button. This will cause curl to pass the
+ data to the server using the content-type application/x-www-form-ur‐
+ lencoded. Compare to -F, --form.
+
+ --data-raw is almost the same but does not have a special interpreta‐
+ tion of the @ character. To post data purely binary, you should in‐
+ stead use the --data-binary option. To URL-encode the value of a form
+ field you may use --data-urlencode.
+
+ If any of these options is used more than once on the same command
+ line, the data pieces specified will be merged together with a sepa‐
+ rating &-symbol. Thus, using '-d name=daniel -d skill=lousy' would
+ generate a post chunk that looks like 'name=daniel&skill=lousy'.
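+
+       For instance, the merge described above could be produced with (the
+       URL is a placeholder):
+
+         curl -d name=daniel -d skill=lousy https://example.com/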
+
+ If you start the data with the letter @, the rest should be a file
+ name to read the data from, or - if you want curl to read the data
+ from stdin. Multiple files can also be specified. Posting data from a
+ file named 'foobar' would thus be done with -d, --data @foobar. When
+ --data is told to read from a file like that, carriage returns and
+ newlines will be stripped out. If you don't want the @ character to
+ have a special interpretation use --data-raw instead.
+
+ See also --data-binary and --data-urlencode and --data-raw. This op‐
+ tion overrides -F, --form and -I, --head and -T, --upload-file.
+
+-f, --fail
+ (HTTP) Fail silently (no output at all) on server errors. This is
+ mostly done to better enable scripts etc to better deal with failed
+ attempts. In normal cases when an HTTP server fails to deliver a docu‐
+ ment, it returns an HTML document stating so (which often also de‐
+ scribes why and more). This flag will prevent curl from outputting
+ that and return error 22.
+
+ This method is not fail-safe and there are occasions where non-suc‐
+ cessful response codes will slip through, especially when authentica‐
+ tion is involved (response codes 401 and 407).
+
+--fail-early
+ Fail and exit on the first detected transfer error.
+
+ When curl is used to do multiple transfers on the command line, it
+ will attempt to operate on each given URL, one by one. By default, it
+ will ignore errors if there are more URLs given and the last URL's
+ success will determine the error code curl returns. So early failures
+ will be "hidden" by subsequent successful transfers.
+
+ Using this option, curl will instead return an error on the first
+ transfer that fails, independent of the amount of URLs that are given
+ on the command line. This way, no transfer failures go undetected by
+ scripts and similar.
+
+ This option is global and does not need to be specified for each use
+ of -:, --next.
+
+ This option does not imply -f, --fail, which causes transfers to fail
+ due to the server's HTTP status code. You can combine the two options,
+ however note -f, --fail is not global and is therefore contained by
+ -:, --next.
+
+ Added in 7.52.0.
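+
+       A sketch of combining it with a per-transfer -f, --fail (the URLs are
+       placeholders):
+
+         curl --fail-early -f https://example.com/one -: -f https://example.com/two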
+
+-F, --form <name=content>
+ (HTTP SMTP IMAP) For HTTP protocol family, this lets curl emulate a
+ filled-in form in which a user has pressed the submit button. This
+ causes curl to POST data using the Content-Type multipart/form-data
+ according to RFC 2388.
+
+       For SMTP and IMAP protocols, this is the means to compose a multipart
+ mail message to transmit.
+
+ This enables uploading of binary files etc. To force the 'content'
+ part to be a file, prefix the file name with an @ sign. To just get
+ the content part from a file, prefix the file name with the symbol <.
+       The difference between @ and < is then that @ makes a file get at‐
+       tached in the post as a file upload, while the < makes a text field
+       and just gets the contents for that text field from a file.
+
+ Tell curl to read content from stdin instead of a file by using - as
+ filename. This goes for both @ and < constructs. When stdin is used,
+ the contents is buffered in memory first by curl to determine its size
+ and allow a possible resend. Defining a part's data from a named non-
+ regular file (such as a named pipe or similar) is unfortunately not
+ subject to buffering and will be effectively read at transmission
+ time; since the full size is unknown before the transfer starts, such
+ data is sent as chunks by HTTP and rejected by IMAP.
+
+ Example: send an image to an HTTP server, where 'profile' is the name
+ of the form-field to which the file portrait.jpg will be the input:
+
+ curl -F profile=@portrait.jpg https://example.com/upload.cgi
+
+       Example: send your name and shoe size in two text fields to the
+ server:
+
+ curl -F name=John -F shoesize=11 https://example.com/
+
+       Example: send your essay in a text field to the server. Send it as a
+ plain text field, but get the contents for it from a local file:
+
+ curl -F "story=<hugefile.txt" https://example.com/
+
+ You can also tell curl what Content-Type to use by using 'type=', in a
+ manner similar to:
+
+ curl -F "web=@index.html;type=text/html" example.com
+
+ or
+
+ curl -F "name=daniel;type=text/foo" example.com
+
+ You can also explicitly change the name field of a file upload part by
+ setting filename=, like this:
+
+ curl -F "file=@localfile;filename=nameinpost" example.com
+
+ If filename/path contains ',' or ';', it must be quoted by double-
+ quotes like:
+
+ curl -F "file=@\"localfile\";filename=\"nameinpost\"" example.com
+
+ or
+
+ curl -F 'file=@"localfile";filename="nameinpost"' example.com
+
+ Note that if a filename/path is quoted by double-quotes, any double-
+ quote or backslash within the filename must be escaped by backslash.
+
+ Quoting must also be applied to non-file data if it contains semi‐
+ colons, leading/trailing spaces or leading double quotes:
+
+ curl -F 'colors="red; green; blue";type=text/x-myapp' example.com
+
+ You can add custom headers to the field by setting headers=, like
+
+ curl -F "submit=OK;headers=\"X-submit-type: OK\"" example.com
+
+ or
+
+ curl -F "submit=OK;headers=@headerfile" example.com
+
+       The headers= keyword may appear more than once and the above notes
+       about quoting apply. When headers are read from a file, empty lines and
+ lines starting with '#' are comments and ignored; each header can be
+ folded by splitting between two words and starting the continuation
+ line with a space; embedded carriage-returns and trailing spaces are
+ stripped. Here is an example of a header file contents:
+
+            # This file contains two headers.
+ X-header-1: this is a header
+
+ # The following header is folded.
+ X-header-2: this is
+ another header
+
+ To support sending multipart mail messages, the syntax is extended as
+ follows:
+ - name can be omitted: the equal sign is the first character of the
+ argument,
+ - if data starts with '(', this signals to start a new multipart: it
+ can be followed by a content type specification.
+ - a multipart can be terminated with a '=)' argument.
+
+       Example: the following command sends an SMTP mime e-mail consisting of
+ an inline part in two alternative formats: plain text and HTML. It at‐
+ taches a text file:
+
+ curl -F '=(;type=multipart/alternative' \
+ -F '=plain text message' \
+ -F '= <body>HTML message</body>;type=text/html' \
+ -F '=)' -F '=@textfile.txt' ... smtp://example.com
+
+ Data can be encoded for transfer using encoder=. Available encodings
+ are binary and 8bit that do nothing else than adding the corresponding
+ Content-Transfer-Encoding header, 7bit that only rejects 8-bit charac‐
+ ters with a transfer error, quoted-printable and base64 that encodes
+       data according to the corresponding schemes, limiting line length to
+ 76 characters.
+
+ Example: send multipart mail with a quoted-printable text message and
+ a base64 attached file:
+
+ curl -F '=text message;encoder=quoted-printable' \
+ -F '=@localfile;encoder=base64' ... smtp://example.com
+
+ See further examples and details in the MANUAL.
+
+ This option can be used multiple times.
+
+ This option overrides -d, --data and -I, --head and -T, --upload-file.
+
+--form-string <name=string>
+ (HTTP SMTP IMAP) Similar to -F, --form except that the value string
+ for the named parameter is used literally. Leading '@' and '<' charac‐
+ ters, and the ';type=' string in the value have no special meaning.
+ Use this in preference to -F, --form if there's any possibility that
+ the string value may accidentally trigger the '@' or '<' features of
+ -F, --form.
+
+ See also -F, --form.
+
+-G, --get
+ When used, this option will make all data specified with -d, --data,
+ --data-binary or --data-urlencode to be used in an HTTP GET request
+ instead of the POST request that otherwise would be used. The data
+ will be appended to the URL with a '?' separator.
+
+ If used in combination with -I, --head, the POST data will instead be
+ appended to the URL with a HEAD request.
+
+ If this option is used several times, only the first one is used. This
+ is because undoing a GET doesn't make sense, but you should then in‐
+ stead enforce the alternative method you prefer.
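+
+       For illustration (the field names and URL are placeholders), this
+       sends the data as a query string in a GET request:
+
+         curl -G -d tool=curl -d page=1 https://example.com/search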
+
+-g, --globoff
+ This option switches off the "URL globbing parser". When you set this
+ option, you can specify URLs that contain the letters {}[] without
+ having them being interpreted by curl itself. Note that these letters
+ are not normal legal URL contents but they should be encoded according
+ to the URI standard.
+
+-I, --head
+ (HTTP FTP FILE) Fetch the headers only! HTTP-servers feature the com‐
+ mand HEAD which this uses to get nothing but the header of a document.
+ When used on an FTP or FILE file, curl displays the file size and last
+ modification time only.
+
+-H, --header <header/@file>
+ (HTTP) Extra header to include in the request when sending HTTP to a
+ server. You may specify any number of extra headers. Note that if you
+ should add a custom header that has the same name as one of the inter‐
+ nal ones curl would use, your externally set header will be used in‐
+ stead of the internal one. This allows you to make even trickier stuff
+ than curl would normally do. You should not replace internally set
+ headers without knowing perfectly well what you're doing. Remove an
+ internal header by giving a replacement without content on the right
+ side of the colon, as in: -H "Host:". If you send the custom header
+       with no value then its header must be terminated with a semicolon,
+ such as -H "X-Custom-Header;" to send "X-Custom-Header:".
+
+ curl will make sure that each header you add/replace is sent with the
+ proper end-of-line marker, you should thus not add that as a part of
+ the header content: do not add newlines or carriage returns, they will
+ only mess things up for you.
+
+ Starting in 7.55.0, this option can take an argument in @filename
+ style, which then adds a header for each line in the input file. Using
+ @- will make curl read the header file from stdin.
+
+ See also the -A, --user-agent and -e, --referer options.
+
+ Starting in 7.37.0, you need --proxy-header to send custom headers in‐
+ tended for a proxy.
+
+ Example:
+
+ curl -H "X-First-Name: Joe" http://example.com/
+
+ WARNING: headers set with this option will be set in all requests -
+ even after redirects are followed, like when told with -L, --location.
+ This can lead to the header being sent to other hosts than the origi‐
+ nal host, so sensitive headers should be used with caution combined
+ with following redirects.
+
+ This option can be used multiple times to add/replace/remove multiple
+ headers.
+
+-h, --help
+ Usage help. This lists all basic command line options with a short
+ description.
+
+-k, --insecure
+ (TLS) By default, every SSL connection curl makes is verified to be
+ secure. This option allows curl to proceed and operate even for server
+ connections otherwise considered insecure.
+
+ The server connection is verified by making sure the server's certifi‐
+ cate contains the right name and verifies successfully using the cert
+ store.
+
+ See this online resource for further details:
+ https://curl.haxx.se/docs/sslcerts.html
+
+ See also --proxy-insecure and --cacert.
+
+-L, --location
+ (HTTP) If the server reports that the requested page has moved to a
+ different location (indicated with a Location: header and a 3XX re‐
+ sponse code), this option will make curl redo the request on the new
+ place. If used together with -i, --include or -I, --head, headers from
+ all requested pages will be shown. When authentication is used, curl
+ only sends its credentials to the initial host. If a redirect takes
+ curl to a different host, it won't be able to intercept the user+pass‐
+ word. See also --location-trusted on how to change this. You can limit
+ the amount of redirects to follow by using the --max-redirs option.
+
+ When curl follows a redirect and the request is not a plain GET (for
+ example POST or PUT), it will do the following request with a GET if
+ the HTTP response was 301, 302, or 303. If the response code was any
+ other 3xx code, curl will re-send the following request using the same
+ unmodified method.
+
+ You can tell curl to not change the non-GET request method to GET af‐
+ ter a 30x response by using the dedicated options for that: --post301,
+ --post302 and --post303.
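+
+       An illustrative combination with --max-redirs (the URL is a
+       placeholder):
+
+         curl -L --max-redirs 5 https://example.com/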
+
+-M, --manual
+ Manual. Display the huge help text.
+
+-m, --max-time <seconds>
+ Maximum time in seconds that you allow the whole operation to take.
+ This is useful for preventing your batch jobs from hanging for hours
+ due to slow networks or links going down. Since 7.32.0, this option
+ accepts decimal values, but the actual timeout will decrease in accu‐
+ racy as the specified timeout increases in decimal precision.
+
+ If this option is used several times, the last one will be used.
+
+ See also --connect-timeout.
+
+-n, --netrc
+ Makes curl scan the .netrc (_netrc on Windows) file in the user's home
+ directory for login name and password. This is typically used for FTP
+ on Unix. If used with HTTP, curl will enable user authentication. See
+ netrc(5) ftp(1) for details on the file format. Curl will not complain
+ if that file doesn't have the right permissions (it should not be ei‐
+ ther world- or group-readable). The environment variable "HOME" is
+ used to find the home directory.
+
+ A quick and very simple example of how to setup a .netrc to allow curl
+ to FTP to the machine host.domain.com with user name 'myself' and
+ password 'secret' should look similar to:
+
+ machine host.domain.com login myself password secret
+
+-o, --output <file>
+ Write output to <file> instead of stdout. If you are using {} or [] to
+ fetch multiple documents, you can use '#' followed by a number in the
+ <file> specifier. That variable will be replaced with the current
+ string for the URL being fetched. Like in:
+
+ curl http://{one,two}.example.com -o "file_#1.txt"
+
+ or use several variables like:
+
+ curl http://{site,host}.host[1-5].com -o "#1_#2"
+
+ You may use this option as many times as the number of URLs you have.
+ For example, if you specify two URLs on the same command line, you can
+ use it like this:
+
+ curl -o aa example.com -o bb example.net
+
+ and the order of the -o options and the URLs doesn't matter, just that
+ the first -o is for the first URL and so on, so the above command line
+ can also be written as
+
+ curl example.com example.net -o aa -o bb
+
+ See also the --create-dirs option to create the local directories dy‐
+ namically. Specifying the output as '-' (a single dash) will force the
+ output to be done to stdout.
+
+ See also -O, --remote-name and --remote-name-all and -J, --remote-
+ header-name.
+
+-#, --progress-bar
+ Make curl display transfer progress as a simple progress bar instead
+ of the standard, more informational, meter.
+
+ This progress bar draws a single line of '#' characters across the
+ screen and shows a percentage if the transfer size is known. For
+       transfers without a known size, there will be a space ship (-=o=-) that
+ moves back and forth but only while data is being transferred, with a
+ set of flying hash sign symbols on top.
+
+-x, --proxy [protocol://]host[:port]
+ Use the specified proxy.
+
+ The proxy string can be specified with a protocol:// prefix. No proto‐
+ col specified or http:// will be treated as HTTP proxy. Use socks4://,
+ socks4a://, socks5:// or socks5h:// to request a specific SOCKS ver‐
+ sion to be used. (The protocol support was added in curl 7.21.7)
+
+ HTTPS proxy support via https:// protocol prefix was added in 7.52.0
+ for OpenSSL, GnuTLS and NSS.
+
+ Unrecognized and unsupported proxy protocols cause an error since
+ 7.52.0. Prior versions may ignore the protocol and use http:// in‐
+ stead.
+
+ If the port number is not specified in the proxy string, it is assumed
+ to be 1080.
+
+ This option overrides existing environment variables that set the
+ proxy to use. If there's an environment variable setting a proxy, you
+ can set proxy to "" to override it.
+
+ All operations that are performed over an HTTP proxy will transpar‐
+ ently be converted to HTTP. It means that certain protocol specific
+ operations might not be available. This is not the case if you can
+       tunnel through the proxy, as done with the -p, --proxytunnel option.
+
+ User and password that might be provided in the proxy string are URL
+ decoded by curl. This allows you to pass in special characters such as
+ @ by using %40 or pass in a colon with %3a.
+
+ The proxy host can be specified the exact same way as the proxy envi‐
+ ronment variables, including the protocol prefix (http://) and the em‐
+ bedded user + password.
+
+ If this option is used several times, the last one will be used.
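+
+       For illustration (the proxy host, port and URL are placeholders):
+
+         curl -x socks5h://proxy.example.com:1080 https://example.com/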
+
+-U, --proxy-user <user:password>
+ Specify the user name and password to use for proxy authentication.
+
+ If you use a Windows SSPI-enabled curl binary and do either Negotiate
+ or NTLM authentication then you can tell curl to select the user name
+ and password from your environment by specifying a single colon with
+ this option: "-U :".
+
+ On systems where it works, curl will hide the given option argument
+ from process listings. This is not enough to protect credentials from
+ possibly getting seen by other users on the same system as they will
+ still be visible for a brief moment before cleared. Such sensitive
+ data should be retrieved from a file instead or similar and never used
+ in clear text in a command line.
+
+ If this option is used several times, the last one will be used.
+
+--retry <num>
+ If a transient error is returned when curl tries to perform a trans‐
+ fer, it will retry this number of times before giving up. Setting the
+ number to 0 makes curl do no retries (which is the default). Transient
+ error means either: a timeout, an FTP 4xx response code or an HTTP 408
+ or 5xx response code.
+
+ When curl is about to retry a transfer, it will first wait one second
+ and then for all forthcoming retries it will double the waiting time
+ until it reaches 10 minutes which then will be the delay between the
+ rest of the retries. By using --retry-delay you disable this exponen‐
+ tial backoff algorithm. See also --retry-max-time to limit the total
+ time allowed for retries.
+
+ Since curl 7.66.0, curl will comply with the Retry-After: response
+ header if one was present to know when to issue the next retry.
+
+ If this option is used several times, the last one will be used.
+
+ Added in 7.12.3.
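+
+       A sketch combining it with --retry-max-time (the values and URL are
+       placeholders):
+
+         curl --retry 5 --retry-max-time 120 https://example.com/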
+
+-s, --silent
+ Silent or quiet mode. Don't show progress meter or error messages.
+ Makes Curl mute. It will still output the data you ask for, poten‐
+ tially even to the terminal/stdout unless you redirect it.
+
+ Use -S, --show-error in addition to this option to disable progress
+ meter but still show error messages.
+
+ See also -v, --verbose and --stderr.
+
+--ssl (FTP IMAP POP3 SMTP) Try to use SSL/TLS for the connection. Reverts
+ to a non-secure connection if the server doesn't support SSL/TLS. See
+ also --ftp-ssl-control and --ssl-reqd for different levels of encryp‐
+ tion required.
+
+ This option was formerly known as --ftp-ssl (Added in 7.11.0). That
+ option name can still be used but will be removed in a future version.
+
+ Added in 7.20.0.
+
+--trace <file>
+ Enables a full trace dump of all incoming and outgoing data, including
+ descriptive information, to the given output file. Use "-" as filename
+ to have the output sent to stdout. Use "%" as filename to have the
+ output sent to stderr.
+
+ If this option is used several times, the last one will be used.
+
+ This option overrides -v, --verbose and --trace-ascii.
+
+-T, --upload-file <file>
+ This transfers the specified local file to the remote URL. If there is
+ no file part in the specified URL, curl will append the local file
+ name. NOTE that you must use a trailing / on the last directory to re‐
+ ally prove to Curl that there is no file name or curl will think that
+ your last directory name is the remote file name to use. That will
+ most likely cause the upload operation to fail. If this is used on an
+ HTTP(S) server, the PUT command will be used.
+
+ Use the file name "-" (a single dash) to use stdin instead of a given
+ file. Alternately, the file name "." (a single period) may be speci‐
+ fied instead of "-" to use stdin in non-blocking mode to allow reading
+ server output while stdin is being uploaded.
+
+ You can specify one -T, --upload-file for each URL on the command
+ line. Each -T, --upload-file + URL pair specifies what to upload and
+ to where. curl also supports "globbing" of the -T, --upload-file argu‐
+ ment, meaning that you can upload multiple files to a single URL by
+ using the same URL globbing style supported in the URL, like this:
+
+ curl --upload-file "{file1,file2}" http://www.example.com
+
+ or even
+
+ curl -T "img[1-1000].png" ftp://ftp.example.com/upload/
+
+ When uploading to an SMTP server: the uploaded data is assumed to be
+ RFC 5322 formatted. It has to feature the necessary set of headers and
+ mail body formatted correctly by the user as curl will not transcode
+ nor encode it further in any way.
+
+-u, --user <user:password>
+ Specify the user name and password to use for server authentication.
+ Overrides -n, --netrc and --netrc-optional.
+
+ If you simply specify the user name, curl will prompt for a password.
+
+       The user name and password are split up on the first colon, which
+       makes it impossible to use a colon in the user name with this option.
+       The password can still contain one.
+
+ On systems where it works, curl will hide the given option argument
+ from process listings. This is not enough to protect credentials from
+ possibly getting seen by other users on the same system as they will
+ still be visible for a brief moment before cleared. Such sensitive
+ data should be retrieved from a file instead or similar and never used
+ in clear text in a command line.
+
+ When using Kerberos V5 with a Windows based server you should include
+ the Windows domain name in the user name, in order for the server to
+ successfully obtain a Kerberos Ticket. If you don't then the initial
+ authentication handshake may fail.
+
+ When using NTLM, the user name can be specified simply as the user
+ name, without the domain, if there is a single domain and forest in
+ your setup for example.
+
+ To specify the domain name use either Down-Level Logon Name or UPN
+ (User Principal Name) formats. For example, EXAMPLE\user and user@ex‐
+ ample.com respectively.
+
+ If you use a Windows SSPI-enabled curl binary and perform Kerberos V5,
+ Negotiate, NTLM or Digest authentication then you can tell curl to se‐
+ lect the user name and password from your environment by specifying a
+ single colon with this option: "-u :".
+
+ If this option is used several times, the last one will be used.
+
+-v, --verbose
+ Makes curl verbose during the operation. Useful for debugging and see‐
+ ing what's going on "under the hood". A line starting with '>' means
+ "header data" sent by curl, '<' means "header data" received by curl
+ that is hidden in normal cases, and a line starting with '*' means ad‐
+ ditional info provided by curl.
+
+ If you only want HTTP headers in the output, -i, --include might be
+ the option you're looking for.
+
+ If you think this option still doesn't give you enough details, con‐
+ sider using --trace or --trace-ascii instead.
+
+ Use -s, --silent to make curl really quiet.
+
+ See also -i, --include. This option overrides --trace and --trace-
+ ascii.
+
+
+-V, --version
+ Displays information about curl and the libcurl version it uses.
+
+ The first line includes the full version of curl, libcurl and
+ other 3rd party libraries linked with the executable.
+
+ The second line (starts with "Protocols:") shows all protocols
+ that libcurl reports to support.
+
+ The third line (starts with "Features:") shows specific features
+ libcurl reports to offer. Available features include:
+
+ IPv6 You can use IPv6 with this.
+
+ krb4 Krb4 for FTP is supported.
+
+ SSL SSL versions of various protocols are supported, such as
+ HTTPS, FTPS, POP3S and so on.
+
+ libz Automatic decompression of compressed files over HTTP is
+ supported.
+
+ NTLM NTLM authentication is supported.
+
+ Debug This curl uses a libcurl built with Debug. This enables
+ more error-tracking and memory debugging etc. For curl-
+ developers only!
+
+ AsynchDNS
+ This curl uses asynchronous name resolves. Asynchronous
+ name resolves can be done using either the c-ares or the
+ threaded resolver backends.
+
+ SPNEGO SPNEGO authentication is supported.
+
+ Largefile
+ This curl supports transfers of large files, files larger
+ than 2GB.
+
+ IDN This curl supports IDN - international domain names.
+
+ GSS-API
+ GSS-API is supported.
+
+ SSPI SSPI is supported.
+
+ TLS-SRP
+ SRP (Secure Remote Password) authentication is supported
+ for TLS.
+
+ HTTP2 HTTP/2 support has been built-in.
+
+ UnixSockets
+ Unix sockets support is provided.
+
+ HTTPS-proxy
+ This curl is built to support HTTPS proxy.
+
+ Metalink
+ This curl supports Metalink (both version 3 and 4 (RFC
+ 5854)), which describes mirrors and hashes. curl will
+ use mirrors for failover if there are errors (such as the
+ file or server not being available).
+
+ PSL PSL is short for Public Suffix List and means that this
+ curl has been built with knowledge about "public suf‐
+ fixes".
+
+ MultiSSL
+ This curl supports multiple TLS backends.
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+For help on a specific category, use --help-CATEGORY where CATEGORY is one of:
+ all, clientauth, debug, encryption, net, output, post, proxy, resolv,
+ request, script, serverauth,
+or a protocol scheme:
+ dict, file, ftp, ftps, gopher, http, https, imap, imaps, ldap, ldaps, pop3,
+  pop3s, rtsp, scp, sftp, smb, smbs, smtp, smtps, telnet, tftp.
+If multiple help options are given, the result is the intersection.
+Search the manual text for matching options with --help-search KEYWORD.
+For detailed option help, use --verbose or -v with a --help option.
+For the curl manual, use --manual.
+
+Basic options:
+ --anyauth Pick any authentication method
+ -a, --append Append to target file when uploading
+ --basic Use HTTP Basic Authentication
+ -K, --config <file> Read config from a file
+ --connect-timeout <seconds> Maximum time allowed for connection
+ -b, --cookie <data|filename> Send cookies from string/file
+ -c, --cookie-jar <filename> Write cookies to <filename> after operation
+ -d, --data <data> HTTP POST data
+ -f, --fail Fail silently (no output at all) on HTTP errors
+ --fail-early Fail on first transfer error, do not continue
+ -F, --form <name=content> Specify multipart MIME data
+ --form-string <name=string> Specify multipart MIME data
+ -G, --get Put the post data in the URL and use GET
+ -g, --globoff Disable URL sequences and ranges using {} and []
+ -I, --head Show document info only
+ -H, --header <header/@file> Pass custom header(s) to server
+ -h, --help This help text
+ -k, --insecure Allow insecure server connections when using SSL
+ -L, --location Follow redirects
+ -M, --manual Display the full manual
+ -m, --max-time <seconds> Maximum time allowed for the transfer
+ -n, --netrc Must read .netrc for user name and password
+ -o, --output <file> Write to file instead of stdout
+ -#, --progress-bar Display transfer progress as a bar
+ -x, --proxy [protocol://]host[:port] Use this proxy
+ -U, --proxy-user <user:password> Proxy user and password
+ --retry <num> Retry request if transient problems occur
+ -s, --silent Silent mode
+ --ssl Try SSL/TLS
+ --trace <file> Write a debug trace to FILE
+ -T, --upload-file <file> Transfer local FILE to destination
+ -u, --user <user:password> Server user and password
+ -v, --verbose Make the operation more talkative
+ -V, --version Show version number and quit
+EOF
+ fi
+ exit 0
+ ;;
+
+ clientauth+ftps)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options affect client authentication with ftps:
+ -E, --cert <certificate[:password]> Client certificate file and password
+ --cert-type <type> Certificate file type (DER/PEM/ENG)
+ --delegation <LEVEL> GSS-API delegation permission
+ --disallow-username-in-url Disallow username in url
+ --ftp-account <data> Account data string
+ --ftp-alternative-to-user <command> String to replace USER [name]
+ --key <key> Private key file name
+ --key-type <type> Private key file type (DER/PEM/ENG)
+ --krb <level> Enable Kerberos with security <level>
+ --negotiate Use HTTP Negotiate (SPNEGO) authentication
+ -n, --netrc Must read .netrc for user name and password
+ --pass <phrase> Pass phrase for the private key
+ --proxy-anyauth Pick any proxy authentication method
+ --proxy-basic Use Basic authentication on the proxy
+ --proxy-cert <cert[:passwd]> Set client certificate for proxy
+ --proxy-cert-type <type> Client certificate type for HTTPS proxy
+ --proxy-digest Use Digest authentication on the proxy
+ --proxy-key <key> Private key for HTTPS proxy
+ --proxy-key-type <type> Private key file type for proxy
+ --proxy-negotiate Use HTTP Negotiate (SPNEGO) authentication on the proxy
+ --proxy-ntlm Use NTLM authentication on the proxy
+ --proxy-pass <phrase> Pass phrase for the private key for HTTPS proxy
+ --proxy-service-name <name> SPNEGO proxy service name
+ --proxy-tlsauthtype <type> TLS authentication type for HTTPS proxy
+ --proxy-tlspassword <string> TLS password for HTTPS proxy
+ --proxy-tlsuser <name> TLS username for HTTPS proxy
+ -U, --proxy-user <user:password> Proxy user and password
+ --service-name <name> SPNEGO service name
+ --socks5-basic Enable username/password auth for SOCKS5 proxies
+ --socks5-gssapi Enable GSS-API auth for SOCKS5 proxies
+ --socks5-gssapi-nec Compatibility with NEC SOCKS5 server
+ --socks5-gssapi-service <name> SOCKS5 proxy service name for GSS-API
+ --tlsauthtype <type> TLS authentication type
+ --tlspassword TLS password
+ --tlsuser <name> TLS user name
+ -u, --user <user:password> Server user and password
+EOF
+ fi
+ exit 0
+ ;;
+
+
+ clientauth)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options affect transport- or protocol-level client authentication:
+ --anyauth Pick any authentication method
+ --basic Use HTTP Basic Authentication
+ -E, --cert <certificate[:password]> Client certificate file and password
+ --cert-type <type> Certificate file type (DER/PEM/ENG)
+ --delegation <LEVEL> GSS-API delegation permission
+ --digest Use HTTP Digest Authentication
+ --disallow-username-in-url Disallow username in url
+ --ftp-account <data> Account data string
+ --ftp-alternative-to-user <command> String to replace USER [name]
+ --key <key> Private key file name
+ --key-type <type> Private key file type (DER/PEM/ENG)
+ --krb <level> Enable Kerberos with security <level>
+ --login-options <options> Server login options
+ --negotiate Use HTTP Negotiate (SPNEGO) authentication
+ -n, --netrc Must read .netrc for user name and password
+ --ntlm Use HTTP NTLM authentication
+ --ntlm-wb Use HTTP NTLM authentication with winbind
+ --oauth2-bearer <token> OAuth 2 Bearer Token
+ --pass <phrase> Pass phrase for the private key
+ --proxy-anyauth Pick any proxy authentication method
+ --proxy-basic Use Basic authentication on the proxy
+ --proxy-cert <cert[:passwd]> Set client certificate for proxy
+ --proxy-cert-type <type> Client certificate type for HTTPS proxy
+ --proxy-digest Use Digest authentication on the proxy
+ --proxy-key <key> Private key for HTTPS proxy
+ --proxy-key-type <type> Private key file type for proxy
+ --proxy-negotiate Use HTTP Negotiate (SPNEGO) authentication on the proxy
+ --proxy-ntlm Use NTLM authentication on the proxy
+ --proxy-pass <phrase> Pass phrase for the private key for HTTPS proxy
+ --proxy-service-name <name> SPNEGO proxy service name
+ --proxy-tlsauthtype <type> TLS authentication type for HTTPS proxy
+ --proxy-tlspassword <string> TLS password for HTTPS proxy
+ --proxy-tlsuser <name> TLS username for HTTPS proxy
+ -U, --proxy-user <user:password> Proxy user and password
+ --pubkey <key> SSH Public key file name
+ --sasl-authzid <identity> Use this identity to act as during SASL PLAIN authentication
+ --sasl-ir Enable initial response in SASL authentication
+ --service-name <name> SPNEGO service name
+ --socks5-basic Enable username/password auth for SOCKS5 proxies
+ --socks5-gssapi Enable GSS-API auth for SOCKS5 proxies
+ --socks5-gssapi-nec Compatibility with NEC SOCKS5 server
+ --socks5-gssapi-service <name> SOCKS5 proxy service name for GSS-API
+ --tlsauthtype <type> TLS authentication type
+ --tlspassword TLS password
+ --tlsuser <name> TLS user name
+ -u, --user <user:password> Server user and password
+EOF
+ fi
+ exit 0
+ ;;
+
+ serverauth)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options affect transport- or protocol-level server authentication:
+ --cacert <file> CA certificate to verify peer against
+ --capath <dir> CA directory to verify peer against
+ --cert-status Verify the status of the server certificate
+ --crlfile <file> Get a CRL list in PEM format from the given file
+ --delegation <LEVEL> GSS-API delegation permission
+ --hostpubmd5 <md5> Acceptable MD5 hash of the host public key
+ -k, --insecure Allow insecure server connections when using SSL
+ --krb <level> Enable Kerberos with security <level>
+ --negotiate Use HTTP Negotiate (SPNEGO) authentication
+ --pinnedpubkey <hashes> FILE/HASHES Public key to verify peer against
+ --proxy-cacert <file> CA certificate to verify peer against for proxy
+ --proxy-capath <dir> CA directory to verify peer against for proxy
+ --proxy-crlfile <file> Set a CRL list for proxy
+ --proxy-insecure Do HTTPS proxy connections without verifying the proxy
+ --proxy-negotiate Use HTTP Negotiate (SPNEGO) authentication on the proxy
+ --proxy-pinnedpubkey <hashes> FILE/HASHES public key to verify proxy with
+ --proxy-service-name <name> SPNEGO proxy service name
+ --service-name <name> SPNEGO service name
+ --socks5-gssapi Enable GSS-API auth for SOCKS5 proxies
+ --ssl-revoke-best-effort Ignore revocation offline or missing revocation list errors (Schannel)
+EOF
+ fi
+ exit 0
+ ;;
+
+
+ all)
+ if [[ -n "$VERBOSE" ]]; then
+ curl --manual
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+ --abstract-unix-socket <path> Connect via abstract Unix domain socket
+ --alt-svc <file name> Enable alt-svc with this cache file
+ --anyauth Pick any authentication method
+ -a, --append Append to target file when uploading
+ --basic Use HTTP Basic Authentication
+ --cacert <file> CA certificate to verify peer against
+ --capath <dir> CA directory to verify peer against
+ -E, --cert <certificate[:password]> Client certificate file and password
+ --cert-status Verify the status of the server certificate
+ --cert-type <type> Certificate file type (DER/PEM/ENG)
+ --ciphers <list of ciphers> SSL ciphers to use
+ --compressed Request compressed response
+ --compressed-ssh Enable SSH compression
+ -K, --config <file> Read config from a file
+ --connect-timeout <seconds> Maximum time allowed for connection
+ --connect-to <HOST1:PORT1:HOST2:PORT2> Connect to host
+ -C, --continue-at <offset> Resumed transfer offset
+ -b, --cookie <data|filename> Send cookies from string/file
+ -c, --cookie-jar <filename> Write cookies to <filename> after operation
+ --create-dirs Create necessary local directory hierarchy
+ --crlf Convert LF to CRLF in upload
+ --crlfile <file> Get a CRL list in PEM format from the given file
+ -d, --data <data> HTTP POST data
+ --data-ascii <data> HTTP POST ASCII data
+ --data-binary <data> HTTP POST binary data
+ --data-raw <data> HTTP POST data, '@' allowed
+ --data-urlencode <data> HTTP POST data url encoded
+ --delegation <LEVEL> GSS-API delegation permission
+ --digest Use HTTP Digest Authentication
+ -q, --disable Disable .curlrc
+ --disable-eprt Inhibit using EPRT or LPRT
+ --disable-epsv Inhibit using EPSV
+ --disallow-username-in-url Disallow username in url
+ --dns-interface <interface> Interface to use for DNS requests
+ --dns-ipv4-addr <address> IPv4 address to use for DNS requests
+ --dns-ipv6-addr <address> IPv6 address to use for DNS requests
+ --dns-servers <addresses> DNS server addrs to use
+ --doh-url <URL> Resolve host names over DOH
+ -D, --dump-header <filename> Write the received headers to <filename>
+ --egd-file <file> EGD socket path for random data
+ --engine <name> Crypto engine to use
+ --etag-save <file> Get an ETag from response header and save it to a FILE
+ --etag-compare <file> Get an ETag from a file and send a conditional request
+ --expect100-timeout <seconds> How long to wait for 100-continue
+ -f, --fail Fail silently (no output at all) on HTTP errors
+ --fail-early Fail on first transfer error, do not continue
+ --false-start Enable TLS False Start
+ -F, --form <name=content> Specify multipart MIME data
+ --form-string <name=string> Specify multipart MIME data
+ --ftp-account <data> Account data string
+ --ftp-alternative-to-user <command> String to replace USER [name]
+ --ftp-create-dirs Create the remote dirs if not present
+ --ftp-method <method> Control CWD usage
+ --ftp-pasv Use PASV/EPSV instead of PORT
+ -P, --ftp-port <address> Use PORT instead of PASV
+ --ftp-pret Send PRET before PASV
+ --ftp-skip-pasv-ip Skip the IP address for PASV
+ --ftp-ssl-ccc Send CCC after authenticating
+ --ftp-ssl-ccc-mode <active/passive> Set CCC mode
+ --ftp-ssl-control Require SSL/TLS for FTP login, clear for transfer
+ -G, --get Put the post data in the URL and use GET
+ -g, --globoff Disable URL sequences and ranges using {} and []
+ --happy-eyeballs-timeout-ms <milliseconds> How long to wait in milliseconds for IPv6 before trying IPv4
+ --haproxy-protocol Send HAProxy PROXY protocol v1 header
+ -I, --head Show document info only
+ -H, --header <header/@file> Pass custom header(s) to server
+ -h, --help This help text
+ --hostpubmd5 <md5> Acceptable MD5 hash of the host public key
+ --http0.9 Allow HTTP 0.9 responses
+ -0, --http1.0 Use HTTP 1.0
+ --http1.1 Use HTTP 1.1
+ --http2 Use HTTP 2
+ --http2-prior-knowledge Use HTTP 2 without HTTP/1.1 Upgrade
+ --http3 Use HTTP v3
+ --ignore-content-length Ignore the size of the remote resource
+ -i, --include Include protocol response headers in the output
+ -k, --insecure Allow insecure server connections when using SSL
+ --interface <name> Use network INTERFACE (or address)
+ -4, --ipv4 Resolve names to IPv4 addresses
+ -6, --ipv6 Resolve names to IPv6 addresses
+ -j, --junk-session-cookies Ignore session cookies read from file
+ --keepalive-time <seconds> Interval time for keepalive probes
+ --key <key> Private key file name
+ --key-type <type> Private key file type (DER/PEM/ENG)
+ --krb <level> Enable Kerberos with security <level>
+ --libcurl <file> Dump libcurl equivalent code of this command line
+ --limit-rate <speed> Limit transfer speed to RATE
+ -l, --list-only List only mode
+ --local-port <num/range> Force use of RANGE for local port numbers
+ -L, --location Follow redirects
+ --location-trusted Like --location, and send auth to other hosts
+ --login-options <options> Server login options
+ --mail-auth <address> Originator address of the original email
+ --mail-from <address> Mail from this address
+ --mail-rcpt <address> Mail to this address
+ --mail-rcpt-allowfails Allow RCPT TO command to fail for some recipients
+ -M, --manual Display the full manual
+ --max-filesize <bytes> Maximum file size to download
+ --max-redirs <num> Maximum number of redirects allowed
+ -m, --max-time <seconds> Maximum time allowed for the transfer
+ --metalink Process given URLs as metalink XML file
+ --negotiate Use HTTP Negotiate (SPNEGO) authentication
+ -n, --netrc Must read .netrc for user name and password
+ --netrc-file <filename> Specify FILE for netrc
+ --netrc-optional Use either .netrc or URL
+ -:, --next Make next URL use its separate set of options
+ --no-alpn Disable the ALPN TLS extension
+ -N, --no-buffer Disable buffering of the output stream
+ --no-keepalive Disable TCP keepalive on the connection
+ --no-npn Disable the NPN TLS extension
+ --no-progress-meter Do not show the progress meter
+ --no-sessionid Disable SSL session-ID reusing
+ --noproxy <no-proxy-list> List of hosts which do not use proxy
+ --ntlm Use HTTP NTLM authentication
+ --ntlm-wb Use HTTP NTLM authentication with winbind
+ --oauth2-bearer <token> OAuth 2 Bearer Token
+ -o, --output <file> Write to file instead of stdout
+ -Z, --parallel Perform transfers in parallel
+ --parallel-immediate Do not wait for multiplexing (with --parallel)
+ --parallel-max Maximum concurrency for parallel transfers
+ --pass <phrase> Pass phrase for the private key
+ --path-as-is Do not squash .. sequences in URL path
+ --pinnedpubkey <hashes> FILE/HASHES Public key to verify peer against
+ --post301 Do not switch to GET after following a 301
+ --post302 Do not switch to GET after following a 302
+ --post303 Do not switch to GET after following a 303
+ --preproxy [protocol://]host[:port] Use this proxy first
+ -#, --progress-bar Display transfer progress as a bar
+ --proto <protocols> Enable/disable PROTOCOLS
+ --proto-default <protocol> Use PROTOCOL for any URL missing a scheme
+ --proto-redir <protocols> Enable/disable PROTOCOLS on redirect
+ -x, --proxy [protocol://]host[:port] Use this proxy
+ --proxy-anyauth Pick any proxy authentication method
+ --proxy-basic Use Basic authentication on the proxy
+ --proxy-cacert <file> CA certificate to verify peer against for proxy
+ --proxy-capath <dir> CA directory to verify peer against for proxy
+ --proxy-cert <cert[:passwd]> Set client certificate for proxy
+ --proxy-cert-type <type> Client certificate type for HTTPS proxy
+ --proxy-ciphers <list> SSL ciphers to use for proxy
+ --proxy-crlfile <file> Set a CRL list for proxy
+ --proxy-digest Use Digest authentication on the proxy
+ --proxy-header <header/@file> Pass custom header(s) to proxy
+ --proxy-insecure Do HTTPS proxy connections without verifying the proxy
+ --proxy-key <key> Private key for HTTPS proxy
+ --proxy-key-type <type> Private key file type for proxy
+ --proxy-negotiate Use HTTP Negotiate (SPNEGO) authentication on the proxy
+ --proxy-ntlm Use NTLM authentication on the proxy
+ --proxy-pass <phrase> Pass phrase for the private key for HTTPS proxy
+ --proxy-pinnedpubkey <hashes> FILE/HASHES public key to verify proxy with
+ --proxy-service-name <name> SPNEGO proxy service name
+ --proxy-ssl-allow-beast Allow security flaw for interop for HTTPS proxy
+ --proxy-tls13-ciphers <list> TLS 1.3 ciphersuites for proxy (OpenSSL)
+ --proxy-tlsauthtype <type> TLS authentication type for HTTPS proxy
+ --proxy-tlspassword <string> TLS password for HTTPS proxy
+ --proxy-tlsuser <name> TLS username for HTTPS proxy
+ --proxy-tlsv1 Use TLSv1 for HTTPS proxy
+ -U, --proxy-user <user:password> Proxy user and password
+ --proxy1.0 <host[:port]> Use HTTP/1.0 proxy on given port
+ -p, --proxytunnel Operate through an HTTP proxy tunnel (using CONNECT)
+ --pubkey <key> SSH Public key file name
+ -Q, --quote Send command(s) to server before transfer
+ --random-file <file> File for reading random data from
+ -r, --range <range> Retrieve only the bytes within RANGE
+ --raw Do HTTP "raw"; no transfer decoding
+ -e, --referer <URL> Referrer URL
+ -J, --remote-header-name Use the header-provided filename
+ -O, --remote-name Write output to a file named as the remote file
+ --remote-name-all Use the remote file name for all URLs
+ -R, --remote-time Set the remote file's time on the local output
+ -X, --request <command> Specify request command to use
+ --request-target Specify the target for this request
+ --resolve <host:port:address[,address]...> Resolve the host+port to this address
+ --retry <num> Retry request if transient problems occur
+ --retry-connrefused Retry on connection refused (use with --retry)
+ --retry-delay <seconds> Wait time between retries
+ --retry-max-time <seconds> Retry only within this period
+ --sasl-authzid <identity> Use this identity to act as during SASL PLAIN authentication
+ --sasl-ir Enable initial response in SASL authentication
+ --service-name <name> SPNEGO service name
+ -S, --show-error Show error even when -s is used
+ -s, --silent Silent mode
+ --socks4 <host[:port]> SOCKS4 proxy on given host + port
+ --socks4a <host[:port]> SOCKS4a proxy on given host + port
+ --socks5 <host[:port]> SOCKS5 proxy on given host + port
+ --socks5-basic Enable username/password auth for SOCKS5 proxies
+ --socks5-gssapi Enable GSS-API auth for SOCKS5 proxies
+ --socks5-gssapi-nec Compatibility with NEC SOCKS5 server
+ --socks5-gssapi-service <name> SOCKS5 proxy service name for GSS-API
+ --socks5-hostname <host[:port]> SOCKS5 proxy, pass host name to proxy
+ -Y, --speed-limit <speed> Stop transfers slower than this
+ -y, --speed-time <seconds> Trigger 'speed-limit' abort after this time
+ --ssl Try SSL/TLS
+ --ssl-allow-beast Allow security flaw to improve interop
+ --ssl-no-revoke Disable cert revocation checks (Schannel)
+ --ssl-revoke-best-effort Ignore revocation offline or missing revocation list errors (Schannel)
+ --ssl-reqd Require SSL/TLS
+ -2, --sslv2 Use SSLv2
+ -3, --sslv3 Use SSLv3
+ --stderr Where to redirect stderr
+ --styled-output Enable styled output for HTTP headers
+ --suppress-connect-headers Suppress proxy CONNECT response headers
+ --tcp-fastopen Use TCP Fast Open
+ --tcp-nodelay Use the TCP_NODELAY option
+ -t, --telnet-option <opt=val> Set telnet option
+ --tftp-blksize <value> Set TFTP BLKSIZE option
+ --tftp-no-options Do not send any TFTP options
+ -z, --time-cond <time> Transfer based on a time condition
+ --tls-max <VERSION> Set maximum allowed TLS version
+ --tls13-ciphers <list> TLS 1.3 ciphersuites (OpenSSL)
+ --tlsauthtype <type> TLS authentication type
+ --tlspassword TLS password
+ --tlsuser <name> TLS user name
+ -1, --tlsv1 Use TLSv1.0 or greater
+ --tlsv1.0 Use TLSv1.0 or greater
+ --tlsv1.1 Use TLSv1.1 or greater
+ --tlsv1.2 Use TLSv1.2 or greater
+ --tlsv1.3 Use TLSv1.3 or greater
+ --tr-encoding Request compressed transfer encoding
+ --trace <file> Write a debug trace to FILE
+ --trace-ascii <file> Like --trace, but without hex output
+ --trace-time Add time stamps to trace/verbose output
+ --unix-socket <path> Connect through this Unix domain socket
+ -T, --upload-file <file> Transfer local FILE to destination
+ --url <url> URL to work with
+ -B, --use-ascii Use ASCII/text transfer
+ -u, --user <user:password> Server user and password
+ -A, --user-agent <name> Send User-Agent <name> to server
+ -v, --verbose Make the operation more talkative
+ -V, --version Show version number and quit
+ -w, --write-out <format> Use output FORMAT after completion
+ --xattr Store metadata in extended file attributes
+EOF
+ fi
+ exit 0
+ ;;
+
+ debug)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+These options are used for debugging a transfer:
+
+-D, --dump-header <filename>
+ (HTTP FTP) Write the received protocol headers to the specified
+ file.
+
+ This option is handy to use when you want to store the headers
+ that an HTTP site sends to you. Cookies from the headers could
+ then be read in a second curl invocation by using the -b,
+ --cookie option! The -c, --cookie-jar option is a better way to
+ store cookies.
+
+ If no headers are received, the use of this option will create
+ an empty file.
+
+ When used in FTP, the FTP server response lines are considered
+ being "headers" and thus are saved there.
+
+ If this option is used several times, the last one will be used.
+
+ See also -o, --output.
+
+-I, --head
+ (HTTP FTP FILE) Fetch the headers only! HTTP-servers feature the
+ command HEAD which this uses to get nothing but the header of a
+ document. When used on an FTP or FILE file, curl displays the
+ file size and last modification time only.
+
+-i, --include
+ Include the HTTP response headers in the output. The HTTP re-
+ sponse headers can include things like server name, cookies,
+ date of the document, HTTP version and more...
+
+ To view the request headers, consider the -v, --verbose option.
+
+ See also -v, --verbose.
+
+--stderr
+ Redirect all writes to stderr to the specified file instead. If
+ the file name is a plain '-', it is instead written to stdout.
+
+ If this option is used several times, the last one will be used.
+
+ See also -v, --verbose and -s, --silent.
+
+--trace-ascii <file>
+ Enables a full trace dump of all incoming and outgoing data, in-
+ cluding descriptive information, to the given output file. Use
+ "-" as filename to have the output sent to stdout.
+
+ This is very similar to --trace, but leaves out the hex part and
+ only shows the ASCII part of the dump. It makes smaller output
+ that might be easier to read for untrained humans.
+
+ If this option is used several times, the last one will be used.
+
+ This option overrides --trace and -v, --verbose.
+
+--trace-time
+ Prepends a time stamp to each trace or verbose line that curl
+ displays.
+
+ Added in 7.14.0.
+
+--trace <file>
+ Enables a full trace dump of all incoming and outgoing data, in-
+ cluding descriptive information, to the given output file. Use
+ "-" as filename to have the output sent to stdout. Use "%" as
+ filename to have the output sent to stderr.
+
+ If this option is used several times, the last one will be used.
+
+ This option overrides -v, --verbose and --trace-ascii.
+
+-v, --verbose
+ Makes curl verbose during the operation. Useful for debugging
+ and seeing what's going on "under the hood". A line starting
+ with '>' means "header data" sent by curl, '<' means "header
+ data" received by curl that is hidden in normal cases, and a
+ line starting with '*' means additional info provided by curl.
+
+ If you only want HTTP headers in the output, -i, --include might
+ be the option you're looking for.
+
+ If you think this option still doesn't give you enough details,
+ consider using --trace or --trace-ascii instead.
+
+ Use -s, --silent to make curl really quiet.
+
+ See also -i, --include. This option overrides --trace and
+ --trace-ascii.
+
+-V, --version
+ Displays information about curl and the libcurl version it uses.
+
+ The first line includes the full version of curl, libcurl and
+ other 3rd party libraries linked with the executable.
+
+ The second line (starts with "Protocols:") shows all protocols
+ that libcurl reports to support.
+
+ The third line (starts with "Features:") shows specific features
+ libcurl reports to offer. Available features include:
+
+ IPv6 You can use IPv6 with this.
+
+ krb4 Krb4 for FTP is supported.
+
+ SSL SSL versions of various protocols are supported, such as
+ HTTPS, FTPS, POP3S and so on.
+
+ libz Automatic decompression of compressed files over HTTP is
+ supported.
+
+ NTLM NTLM authentication is supported.
+
+ Debug This curl uses a libcurl built with Debug. This enables
+ more error-tracking and memory debugging etc. For curl-
+ developers only!
+
+ AsynchDNS
+ This curl uses asynchronous name resolves. Asynchronous
+ name resolves can be done using either the c-ares or the
+ threaded resolver backends.
+
+ SPNEGO SPNEGO authentication is supported.
+
+ Largefile
+ This curl supports transfers of large files, files larger
+ than 2GB.
+
+ IDN This curl supports IDN - international domain names.
+
+ GSS-API
+ GSS-API is supported.
+
+ SSPI SSPI is supported.
+
+ TLS-SRP
+ SRP (Secure Remote Password) authentication is supported
+ for TLS.
+
+ HTTP2 HTTP/2 support has been built-in.
+
+ UnixSockets
+ Unix sockets support is provided.
+
+ HTTPS-proxy
+ This curl is built to support HTTPS proxy.
+
+ Metalink
+ This curl supports Metalink (both version 3 and 4 (RFC
+ 5854)), which describes mirrors and hashes. curl will
+ use mirrors for failover if there are errors (such as the
+ file or server not being available).
+
+ PSL PSL is short for Public Suffix List and means that this
+ curl has been built with knowledge about "public suf‐
+ fixes".
+
+ MultiSSL
+ This curl supports multiple TLS backends.
+
+-w, --write-out <format>
+ Make curl display information on stdout after a completed trans‐
+ fer. The format is a string that may contain plain text mixed
+ with any number of variables. The format can be specified as a
+ literal "string", or you can have curl read the format from a
+ file with "@filename" and to tell curl to read the format from
+ stdin you write "@-".
+
+ The variables present in the output format will be substituted
+ by the value or text that curl thinks fit, as described below.
+ All variables are specified as %{variable_name} and to output a
+ normal % you just write them as %%. You can output a newline by
+ using \n, a carriage return with \r and a tab space with \t.
+
+ The output will be written to standard output, but this can be
+ switched to standard error by using %{stderr}.
+
+ NOTE: The %-symbol is a special symbol in the win32-environment,
+ where all occurrences of % must be doubled when using this op‐
+ tion.
+
+ The variables available are:
+
+ content_type The Content-Type of the requested document, if
+ there was any.
+
+ filename_effective
+ The ultimate filename that curl writes out to.
+ This is only meaningful if curl is told to write
+ to a file with the -O, --remote-name or -o,
+ --output option. It's most useful in combination
+ with the -J, --remote-header-name option. (Added
+ in 7.26.0)
+
+ ftp_entry_path The initial path curl ended up in when logging on
+ to the remote FTP server. (Added in 7.15.4)
+
+ http_code The numerical response code that was found in the
+ last retrieved HTTP(S) or FTP(s) transfer. In
+ 7.18.2 the alias response_code was added to show
+ the same info.
+
+ http_connect The numerical code that was found in the last re‐
+ sponse (from a proxy) to a curl CONNECT request.
+ (Added in 7.12.4)
+
+ http_version The http version that was effectively used.
+ (Added in 7.50.0)
+
+ local_ip The IP address of the local end of the most re‐
+ cently done connection - can be either IPv4 or
+ IPv6 (Added in 7.29.0)
+
+ local_port The local port number of the most recently done
+ connection (Added in 7.29.0)
+
+ num_connects Number of new connects made in the recent trans‐
+ fer. (Added in 7.12.3)
+
+ num_redirects Number of redirects that were followed in the re‐
+ quest. (Added in 7.12.3)
+
+ proxy_ssl_verify_result
+ The result of the HTTPS proxy's SSL peer certifi‐
+ cate verification that was requested. 0 means the
+ verification was successful. (Added in 7.52.0)
+
+ redirect_url When an HTTP request was made without -L, --loca‐
+ tion to follow redirects (or when --max-redir is
+ met), this variable will show the actual URL a
+ redirect would have gone to. (Added in 7.18.2)
+
+ remote_ip The remote IP address of the most recently done
+ connection - can be either IPv4 or IPv6 (Added in
+ 7.29.0)
+
+ remote_port The remote port number of the most recently done
+ connection (Added in 7.29.0)
+
+ scheme The URL scheme (sometimes called protocol) that
+ was effectively used (Added in 7.52.0)
+
+ size_download The total amount of bytes that were downloaded.
+
+ size_header The total amount of bytes of the downloaded head‐
+ ers.
+
+ size_request The total amount of bytes that were sent in the
+ HTTP request.
+
+ size_upload The total amount of bytes that were uploaded.
+
+ speed_download The average download speed that curl measured for
+ the complete download. Bytes per second.
+
+ speed_upload The average upload speed that curl measured for
+ the complete upload. Bytes per second.
+
+ ssl_verify_result
+ The result of the SSL peer certificate verifica‐
+ tion that was requested. 0 means the verification
+ was successful. (Added in 7.19.0)
+
+ stderr From this point on, the -w, --write-out output
+ will be written to standard error. (Added in
+ 7.63.0)
+
+ stdout From this point on, the -w, --write-out output
+ will be written to standard output. This is the
+ default, but can be used to switch back after
+ switching to stderr. (Added in 7.63.0)
+
+ time_appconnect
+ The time, in seconds, it took from the start un‐
+ til the SSL/SSH/etc connect/handshake to the re‐
+ mote host was completed. (Added in 7.19.0)
+
+ time_connect The time, in seconds, it took from the start un‐
+ til the TCP connect to the remote host (or proxy)
+ was completed.
+
+ time_namelookup
+ The time, in seconds, it took from the start un‐
+ til the name resolving was completed.
+
+ time_pretransfer
+ The time, in seconds, it took from the start un‐
+ til the file transfer was just about to begin.
+ This includes all pre-transfer commands and nego‐
+ tiations that are specific to the particular pro‐
+ tocol(s) involved.
+
+ time_redirect The time, in seconds, it took for all redirection
+ steps including name lookup, connect, pretransfer
+ and transfer before the final transaction was
+ started. time_redirect shows the complete execu‐
+ tion time for multiple redirections. (Added in
+ 7.12.3)
+
+ time_starttransfer
+ The time, in seconds, it took from the start un‐
+ til the first byte was just about to be trans‐
+ ferred. This includes time_pretransfer and also
+ the time the server needed to calculate the re‐
+ sult.
+
+ time_total The total time, in seconds, that the full opera‐
+ tion lasted.
+
+ url_effective The URL that was fetched last. This is most mean‐
+ ingful if you've told curl to follow location:
+ headers.
+
+ If this option is used several times, the last one will be used.
+
+
+
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+ -D, --dump-header <filename> Write the received headers to <filename>
+ -I, --head Show document info only
+ -i, --include Include protocol response headers in the output
+ --stderr Where to redirect stderr
+ --trace <file> Write a debug trace to FILE
+ --trace-ascii <file> Like --trace, but without hex output
+ --trace-time Add time stamps to trace/verbose output
+ -v, --verbose Make the operation more talkative
+ -V, --version Show version number and quit
+ -w, --write-out <format> Use output FORMAT after completion
+EOF
+ fi
+ exit 0
+ ;;
+
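+ # For illustration only (not one of this demo's help flags): the debug
+ # options listed above are typically combined on a plain curl command
+ # line like this (example.com is just a placeholder host):
+ #   curl -v --trace-time -D headers.txt -o page.html https://example.com/
+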
+ ftps)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options are valid on ftps transfers:
+ -a, --append Append to target file when uploading
+ --cacert <file> CA certificate to verify peer against
+ --capath <dir> CA directory to verify peer against
+ -E, --cert <certificate[:password]> Client certificate file and password
+ --cert-status Verify the status of the server certificate
+ --cert-type <type> Certificate file type (DER/PEM/ENG)
+ --ciphers <list of ciphers> SSL ciphers to use
+ -K, --config <file> Read config from a file
+ --connect-timeout <seconds> Maximum time allowed for connection
+ --connect-to <HOST1:PORT1:HOST2:PORT2> Connect to host
+ -C, --continue-at <offset> Resumed transfer offset
+ --create-dirs Create necessary local directory hierarchy
+ --crlf Convert LF to CRLF in upload
+ --crlfile <file> Get a CRL list in PEM format from the given file
+ --delegation <LEVEL> GSS-API delegation permission
+ -q, --disable Disable .curlrc
+ --disable-eprt Inhibit using EPRT or LPRT
+ --disable-epsv Inhibit using EPSV
+ --disallow-username-in-url Disallow username in url
+ --dns-interface <interface> Interface to use for DNS requests
+ --dns-ipv4-addr <address> IPv4 address to use for DNS requests
+ --dns-ipv6-addr <address> IPv6 address to use for DNS requests
+ --dns-servers <addresses> DNS server addrs to use
+ --doh-url <URL> Resolve host names over DOH
+ -D, --dump-header <filename> Write the received headers to <filename>
+ --egd-file <file> EGD socket path for random data
+ --engine <name> Crypto engine to use
+ -f, --fail Fail silently (no output at all) on HTTP errors
+ --fail-early Fail on first transfer error, do not continue
+ --false-start Enable TLS False Start
+ --ftp-account <data> Account data string
+ --ftp-alternative-to-user <command> String to replace USER [name]
+ --ftp-create-dirs Create the remote dirs if not present
+ --ftp-method <method> Control CWD usage
+ --ftp-pasv Use PASV/EPSV instead of PORT
+ -P, --ftp-port <address> Use PORT instead of PASV
+ --ftp-pret Send PRET before PASV
+ --ftp-skip-pasv-ip Skip the IP address for PASV
+ --ftp-ssl-ccc Send CCC after authenticating
+ --ftp-ssl-ccc-mode <active/passive> Set CCC mode
+ --ftp-ssl-control Require SSL/TLS for FTP login, clear for transfer
+ -g, --globoff Disable URL sequences and ranges using {} and []
+ --happy-eyeballs-timeout-ms <milliseconds> How long to wait in milliseconds for IPv6 before trying IPv4
+ --haproxy-protocol Send HAProxy PROXY protocol v1 header
+ -I, --head Show document info only
+ --ignore-content-length Ignore the size of the remote resource
+ -i, --include Include protocol response headers in the output
+ -k, --insecure Allow insecure server connections when using SSL
+ --interface <name> Use network INTERFACE (or address)
+ -4, --ipv4 Resolve names to IPv4 addresses
+ -6, --ipv6 Resolve names to IPv6 addresses
+ --keepalive-time <seconds> Interval time for keepalive probes
+ --key <key> Private key file name
+ --key-type <type> Private key file type (DER/PEM/ENG)
+ --krb <level> Enable Kerberos with security <level>
+ --libcurl <file> Dump libcurl equivalent code of this command line
+ --limit-rate <speed> Limit transfer speed to RATE
+ -l, --list-only List only mode
+ --local-port <num/range> Force use of RANGE for local port numbers
+ -M, --manual Display the full manual
+ --max-filesize <bytes> Maximum file size to download
+ -m, --max-time <seconds> Maximum time allowed for the transfer
+ --metalink Process given URLs as metalink XML file
+ --negotiate Use HTTP Negotiate (SPNEGO) authentication
+ -n, --netrc Must read .netrc for user name and password
+ --netrc-file <filename> Specify FILE for netrc
+ --netrc-optional Use either .netrc or URL
+ -:, --next Make next URL use its separate set of options
+ --no-alpn Disable the ALPN TLS extension
+ -N, --no-buffer Disable buffering of the output stream
+ --no-keepalive Disable TCP keepalive on the connection
+ --no-npn Disable the NPN TLS extension
+ --no-progress-meter Do not show the progress meter
+ --no-sessionid Disable SSL session-ID reusing
+ --noproxy <no-proxy-list> List of hosts which do not use proxy
+ -o, --output <file> Write to file instead of stdout
+ -Z, --parallel Perform transfers in parallel
+ --parallel-immediate Do not wait for multiplexing (with --parallel)
+ --parallel-max Maximum concurrency for parallel transfers
+ --pass <phrase> Pass phrase for the private key
+ --path-as-is Do not squash .. sequences in URL path
+ --pinnedpubkey <hashes> FILE/HASHES Public key to verify peer against
+ --preproxy [protocol://]host[:port] Use this proxy first
+ -#, --progress-bar Display transfer progress as a bar
+ --proto <protocols> Enable/disable PROTOCOLS
+ --proto-default <protocol> Use PROTOCOL for any URL missing a scheme
+ --proto-redir <protocols> Enable/disable PROTOCOLS on redirect
+ -x, --proxy [protocol://]host[:port] Use this proxy
+ --proxy-anyauth Pick any proxy authentication method
+ --proxy-basic Use Basic authentication on the proxy
+ --proxy-cacert <file> CA certificate to verify peer against for proxy
+ --proxy-capath <dir> CA directory to verify peer against for proxy
+ --proxy-cert <cert[:passwd]> Set client certificate for proxy
+ --proxy-cert-type <type> Client certificate type for HTTPS proxy
+ --proxy-ciphers <list> SSL ciphers to use for proxy
+ --proxy-crlfile <file> Set a CRL list for proxy
+ --proxy-digest Use Digest authentication on the proxy
+ --proxy-header <header/@file> Pass custom header(s) to proxy
+ --proxy-insecure Do HTTPS proxy connections without verifying the proxy
+ --proxy-key <key> Private key for HTTPS proxy
+ --proxy-key-type <type> Private key file type for proxy
+ --proxy-negotiate Use HTTP Negotiate (SPNEGO) authentication on the proxy
+ --proxy-ntlm Use NTLM authentication on the proxy
+ --proxy-pass <phrase> Pass phrase for the private key for HTTPS proxy
+ --proxy-pinnedpubkey <hashes> FILE/HASHES public key to verify proxy with
+ --proxy-service-name <name> SPNEGO proxy service name
+ --proxy-ssl-allow-beast Allow security flaw for interop for HTTPS proxy
+ --proxy-tls13-ciphers <list> TLS 1.3 ciphersuites for proxy (OpenSSL)
+ --proxy-tlsauthtype <type> TLS authentication type for HTTPS proxy
+ --proxy-tlspassword <string> TLS password for HTTPS proxy
+ --proxy-tlsuser <name> TLS username for HTTPS proxy
+ --proxy-tlsv1 Use TLSv1 for HTTPS proxy
+ -U, --proxy-user <user:password> Proxy user and password
+ --proxy1.0 <host[:port]> Use HTTP/1.0 proxy on given port
+ -p, --proxytunnel Operate through an HTTP proxy tunnel (using CONNECT)
+ -Q, --quote Send command(s) to server before transfer
+ --random-file <file> File for reading random data from
+ -r, --range <range> Retrieve only the bytes within RANGE
+ -e, --referer <URL> Referrer URL
+ -O, --remote-name Write output to a file named as the remote file
+ --remote-name-all Use the remote file name for all URLs
+ -R, --remote-time Set the remote file's time on the local output
+ -X, --request <command> Specify request command to use
+ --resolve <host:port:address[,address]...> Resolve the host+port to this address
+ --retry <num> Retry request if transient problems occur
+ --retry-connrefused Retry on connection refused (use with --retry)
+ --retry-delay <seconds> Wait time between retries
+ --retry-max-time <seconds> Retry only within this period
+ --service-name <name> SPNEGO service name
+ -S, --show-error Show error even when -s is used
+ -s, --silent Silent mode
+ --socks4 <host[:port]> SOCKS4 proxy on given host + port
+ --socks4a <host[:port]> SOCKS4a proxy on given host + port
+ --socks5 <host[:port]> SOCKS5 proxy on given host + port
+ --socks5-basic Enable username/password auth for SOCKS5 proxies
+ --socks5-gssapi Enable GSS-API auth for SOCKS5 proxies
+ --socks5-gssapi-nec Compatibility with NEC SOCKS5 server
+ --socks5-gssapi-service <name> SOCKS5 proxy service name for GSS-API
+ --socks5-hostname <host[:port]> SOCKS5 proxy, pass host name to proxy
+ -Y, --speed-limit <speed> Stop transfers slower than this
+ -y, --speed-time <seconds> Trigger 'speed-limit' abort after this time
+ --ssl Try SSL/TLS
+ --ssl-allow-beast Allow security flaw to improve interop
+ --ssl-no-revoke Disable cert revocation checks (Schannel)
+ --ssl-revoke-best-effort Ignore revocation offline or missing revocation list errors (Schannel)
+ --ssl-reqd Require SSL/TLS
+ -2, --sslv2 Use SSLv2
+ -3, --sslv3 Use SSLv3
+ --stderr Where to redirect stderr
+ --suppress-connect-headers Suppress proxy CONNECT response headers
+ --tcp-fastopen Use TCP Fast Open
+ --tcp-nodelay Use the TCP_NODELAY option
+ -z, --time-cond <time> Transfer based on a time condition
+ --tls-max <VERSION> Set maximum allowed TLS version
+ --tls13-ciphers <list> TLS 1.3 ciphersuites (OpenSSL)
+ --tlsauthtype <type> TLS authentication type
+ --tlspassword TLS password
+ --tlsuser <name> TLS user name
+ -1, --tlsv1 Use TLSv1.0 or greater
+ --tlsv1.0 Use TLSv1.0 or greater
+ --tlsv1.1 Use TLSv1.1 or greater
+ --tlsv1.2 Use TLSv1.2 or greater
+ --tlsv1.3 Use TLSv1.3 or greater
+ --trace <file> Write a debug trace to FILE
+ --trace-ascii <file> Like --trace, but without hex output
+ --trace-time Add time stamps to trace/verbose output
+ -T, --upload-file <file> Transfer local FILE to destination
+ --url <url> URL to work with
+ -B, --use-ascii Use ASCII/text transfer
+ -u, --user <user:password> Server user and password
+ -A, --user-agent <name> Send User-Agent <name> to server
+ -v, --verbose Make the operation more talkative
+ -V, --version Show version number and quit
+ -w, --write-out <format> Use output FORMAT after completion
+ --xattr Store metadata in extended file attributes
+EOF
+ fi
+ exit 0
+ ;;
+
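+ # Illustrative only: a plain curl ftps transfer exercising a few of the
+ # options listed above (host and credentials are placeholders):
+ #   curl -u name:passwd --ftp-create-dirs -T local.txt ftps://ftp.example.com/upload/
+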
+ https)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options are valid on https transfers:
+ --abstract-unix-socket <path> Connect via abstract Unix domain socket
+ --alt-svc <file name> Enable alt-svc with this cache file
+ --anyauth Pick any authentication method
+ -a, --append Append to target file when uploading
+ --basic Use HTTP Basic Authentication
+ --cacert <file> CA certificate to verify peer against
+ --capath <dir> CA directory to verify peer against
+ -E, --cert <certificate[:password]> Client certificate file and password
+ --cert-status Verify the status of the server certificate
+ --cert-type <type> Certificate file type (DER/PEM/ENG)
+ --ciphers <list of ciphers> SSL ciphers to use
+ --compressed Request compressed response
+ -K, --config <file> Read config from a file
+ --connect-timeout <seconds> Maximum time allowed for connection
+ --connect-to <HOST1:PORT1:HOST2:PORT2> Connect to host
+ -C, --continue-at <offset> Resumed transfer offset
+ -b, --cookie <data|filename> Send cookies from string/file
+ -c, --cookie-jar <filename> Write cookies to <filename> after operation
+ --create-dirs Create necessary local directory hierarchy
+ --crlf Convert LF to CRLF in upload
+ --crlfile <file> Get a CRL list in PEM format from the given file
+ -d, --data <data> HTTP POST data
+ --data-ascii <data> HTTP POST ASCII data
+ --data-binary <data> HTTP POST binary data
+ --data-raw <data> HTTP POST data, '@' allowed
+ --data-urlencode <data> HTTP POST data url encoded
+ --delegation <LEVEL> GSS-API delegation permission
+ --digest Use HTTP Digest Authentication
+ -q, --disable Disable .curlrc
+ --disable-eprt Inhibit using EPRT or LPRT
+ --disable-epsv Inhibit using EPSV
+ --disallow-username-in-url Disallow username in url
+ --dns-interface <interface> Interface to use for DNS requests
+ --dns-ipv4-addr <address> IPv4 address to use for DNS requests
+ --dns-ipv6-addr <address> IPv6 address to use for DNS requests
+ --dns-servers <addresses> DNS server addrs to use
+ --doh-url <URL> Resolve host names over DOH
+ -D, --dump-header <filename> Write the received headers to <filename>
+ --egd-file <file> EGD socket path for random data
+ --engine <name> Crypto engine to use
+ --etag-save <file> Get an ETag from response header and save it to a FILE
+ --etag-compare <file> Get an ETag from a file and send a conditional request
+ --expect100-timeout <seconds> How long to wait for 100-continue
+ -f, --fail Fail silently (no output at all) on HTTP errors
+ --fail-early Fail on first transfer error, do not continue
+ --false-start Enable TLS False Start
+ -F, --form <name=content> Specify multipart MIME data
+ --form-string <name=string> Specify multipart MIME data
+ -G, --get Put the post data in the URL and use GET
+ -g, --globoff Disable URL sequences and ranges using {} and []
+ --happy-eyeballs-timeout-ms <milliseconds> How long to wait in milliseconds for IPv6 before trying IPv4
+ --haproxy-protocol Send HAProxy PROXY protocol v1 header
+ -I, --head Show document info only
+ -H, --header <header/@file> Pass custom header(s) to server
+ -h, --help This help text
+ --http0.9 Allow HTTP 0.9 responses
+ -0, --http1.0 Use HTTP 1.0
+ --http1.1 Use HTTP 1.1
+ --http2 Use HTTP 2
+ --http2-prior-knowledge Use HTTP 2 without HTTP/1.1 Upgrade
+ --http3 Use HTTP v3
+ --ignore-content-length Ignore the size of the remote resource
+ -i, --include Include protocol response headers in the output
+ -k, --insecure Allow insecure server connections when using SSL
+ --interface <name> Use network INTERFACE (or address)
+ -4, --ipv4 Resolve names to IPv4 addresses
+ -6, --ipv6 Resolve names to IPv6 addresses
+ -j, --junk-session-cookies Ignore session cookies read from file
+ --keepalive-time <seconds> Interval time for keepalive probes
+ --key <key> Private key file name
+ --key-type <type> Private key file type (DER/PEM/ENG)
+ --krb <level> Enable Kerberos with security <level>
+ --libcurl <file> Dump libcurl equivalent code of this command line
+ --limit-rate <speed> Limit transfer speed to RATE
+ --local-port <num/range> Force use of RANGE for local port numbers
+ -L, --location Follow redirects
+ --location-trusted Like --location, and send auth to other hosts
+ -M, --manual Display the full manual
+ --max-filesize <bytes> Maximum file size to download
+ --max-redirs <num> Maximum number of redirects allowed
+ -m, --max-time <seconds> Maximum time allowed for the transfer
+ --metalink Process given URLs as metalink XML file
+ --negotiate Use HTTP Negotiate (SPNEGO) authentication
+ -n, --netrc Must read .netrc for user name and password
+ --netrc-file <filename> Specify FILE for netrc
+ --netrc-optional Use either .netrc or URL
+ -:, --next Make next URL use its separate set of options
+ --no-alpn Disable the ALPN TLS extension
+ -N, --no-buffer Disable buffering of the output stream
+ --no-keepalive Disable TCP keepalive on the connection
+ --no-npn Disable the NPN TLS extension
+ --no-progress-meter Do not show the progress meter
+ --no-sessionid Disable SSL session-ID reusing
+ --noproxy <no-proxy-list> List of hosts which do not use proxy
+ --ntlm Use HTTP NTLM authentication
+ --ntlm-wb Use HTTP NTLM authentication with winbind
+ --oauth2-bearer <token> OAuth 2 Bearer Token
+ -o, --output <file> Write to file instead of stdout
+ -Z, --parallel Perform transfers in parallel
+ --parallel-immediate Do not wait for multiplexing (with --parallel)
+ --parallel-max Maximum concurrency for parallel transfers
+ --pass <phrase> Pass phrase for the private key
+ --path-as-is Do not squash .. sequences in URL path
+ --pinnedpubkey <hashes> FILE/HASHES Public key to verify peer against
+ --post301 Do not switch to GET after following a 301
+ --post302 Do not switch to GET after following a 302
+ --post303 Do not switch to GET after following a 303
+ --preproxy [protocol://]host[:port] Use this proxy first
+ -#, --progress-bar Display transfer progress as a bar
+ --proto <protocols> Enable/disable PROTOCOLS
+ --proto-default <protocol> Use PROTOCOL for any URL missing a scheme
+ --proto-redir <protocols> Enable/disable PROTOCOLS on redirect
+ -x, --proxy [protocol://]host[:port] Use this proxy
+ --proxy-anyauth Pick any proxy authentication method
+ --proxy-basic Use Basic authentication on the proxy
+ --proxy-cacert <file> CA certificate to verify peer against for proxy
+ --proxy-capath <dir> CA directory to verify peer against for proxy
+ --proxy-cert <cert[:passwd]> Set client certificate for proxy
+ --proxy-cert-type <type> Client certificate type for HTTPS proxy
+ --proxy-ciphers <list> SSL ciphers to use for proxy
+ --proxy-crlfile <file> Set a CRL list for proxy
+ --proxy-digest Use Digest authentication on the proxy
+ --proxy-header <header/@file> Pass custom header(s) to proxy
+ --proxy-insecure Do HTTPS proxy connections without verifying the proxy
+ --proxy-key <key> Private key for HTTPS proxy
+ --proxy-key-type <type> Private key file type for proxy
+ --proxy-negotiate Use HTTP Negotiate (SPNEGO) authentication on the proxy
+ --proxy-ntlm Use NTLM authentication on the proxy
+ --proxy-pass <phrase> Pass phrase for the private key for HTTPS proxy
+ --proxy-pinnedpubkey <hashes> FILE/HASHES public key to verify proxy with
+ --proxy-service-name <name> SPNEGO proxy service name
+ --proxy-ssl-allow-beast Allow security flaw for interop for HTTPS proxy
+ --proxy-tls13-ciphers <list> TLS 1.3 ciphersuites for proxy (OpenSSL)
+ --proxy-tlsauthtype <type> TLS authentication type for HTTPS proxy
+ --proxy-tlspassword <string> TLS password for HTTPS proxy
+ --proxy-tlsuser <name> TLS username for HTTPS proxy
+ --proxy-tlsv1 Use TLSv1 for HTTPS proxy
+ -U, --proxy-user <user:password> Proxy user and password
+ --proxy1.0 <host[:port]> Use HTTP/1.0 proxy on given port
+ -p, --proxytunnel Operate through an HTTP proxy tunnel (using CONNECT)
+ --random-file <file> File for reading random data from
+ -r, --range <range> Retrieve only the bytes within RANGE
+ --raw Do HTTP "raw"; no transfer decoding
+ -e, --referer <URL> Referrer URL
+ -J, --remote-header-name Use the header-provided filename
+ -O, --remote-name Write output to a file named as the remote file
+ --remote-name-all Use the remote file name for all URLs
+ -R, --remote-time Set the remote file's time on the local output
+ -X, --request <command> Specify request command to use
+ --request-target Specify the target for this request
+ --resolve <host:port:address[,address]...> Resolve the host+port to this address
+ --retry <num> Retry request if transient problems occur
+ --retry-connrefused Retry on connection refused (use with --retry)
+ --retry-delay <seconds> Wait time between retries
+ --retry-max-time <seconds> Retry only within this period
+ --service-name <name> SPNEGO service name
+ -S, --show-error Show error even when -s is used
+ -s, --silent Silent mode
+ --socks4 <host[:port]> SOCKS4 proxy on given host + port
+ --socks4a <host[:port]> SOCKS4a proxy on given host + port
+ --socks5 <host[:port]> SOCKS5 proxy on given host + port
+ --socks5-basic Enable username/password auth for SOCKS5 proxies
+ --socks5-gssapi Enable GSS-API auth for SOCKS5 proxies
+ --socks5-gssapi-nec Compatibility with NEC SOCKS5 server
+ --socks5-gssapi-service <name> SOCKS5 proxy service name for GSS-API
+ --socks5-hostname <host[:port]> SOCKS5 proxy, pass host name to proxy
+ -Y, --speed-limit <speed> Stop transfers slower than this
+ -y, --speed-time <seconds> Trigger 'speed-limit' abort after this time
+ --ssl Try SSL/TLS
+ --ssl-allow-beast Allow security flaw to improve interop
+ --ssl-no-revoke Disable cert revocation checks (Schannel)
+ --ssl-revoke-best-effort Ignore revocation offline or missing revocation list errors (Schannel)
+ --ssl-reqd Require SSL/TLS
+ -2, --sslv2 Use SSLv2
+ -3, --sslv3 Use SSLv3
+ --stderr Where to redirect stderr
+ --styled-output Enable styled output for HTTP headers
+ --suppress-connect-headers Suppress proxy CONNECT response headers
+ --tcp-fastopen Use TCP Fast Open
+ --tcp-nodelay Use the TCP_NODELAY option
+ -z, --time-cond <time> Transfer based on a time condition
+ --tls-max <VERSION> Set maximum allowed TLS version
+ --tls13-ciphers <list> TLS 1.3 ciphersuites (OpenSSL)
+ --tlsauthtype <type> TLS authentication type
+ --tlspassword TLS password
+ --tlsuser <name> TLS user name
+ -1, --tlsv1 Use TLSv1.0 or greater
+ --tlsv1.0 Use TLSv1.0 or greater
+ --tlsv1.1 Use TLSv1.1 or greater
+ --tlsv1.2 Use TLSv1.2 or greater
+ --tlsv1.3 Use TLSv1.3 or greater
+ --tr-encoding Request compressed transfer encoding
+ --trace <file> Write a debug trace to FILE
+ --trace-ascii <file> Like --trace, but without hex output
+ --trace-time Add time stamps to trace/verbose output
+ --unix-socket <path> Connect through this Unix domain socket
+ -T, --upload-file <file> Transfer local FILE to destination
+ --url <url> URL to work with
+ -u, --user <user:password> Server user and password
+ -A, --user-agent <name> Send User-Agent <name> to server
+ -v, --verbose Make the operation more talkative
+ -V, --version Show version number and quit
+ -w, --write-out <format> Use output FORMAT after completion
+ --xattr Store metadata in extended file attributes
+EOF
+ fi
+ exit 0
+ ;;
+
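+ # Illustrative only: a plain curl https transfer using a few of the
+ # options listed above (the URL is a placeholder):
+ #   curl -L --compressed -o page.html https://example.com/
+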
+ ftps-https)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Verbose help TBD
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options affect ftps but not https:
+ -B, --use-ascii Use ASCII/text transfer
+ --ftp-account <data> Account data string
+ --ftp-alternative-to-user <command> String to replace USER [name]
+ --ftp-create-dirs Create the remote dirs if not present
+ --ftp-method <method> Control CWD usage
+ --ftp-pasv Use PASV/EPSV instead of PORT
+ --ftp-pret Send PRET before PASV
+ --ftp-skip-pasv-ip Skip the IP address for PASV
+ --ftp-ssl-ccc-mode <active/passive> Set CCC mode
+ --ftp-ssl-ccc Send CCC after authenticating
+ --ftp-ssl-control Require SSL/TLS for FTP login, clear for transfer
+ -l, --list-only List only mode
+ -P, --ftp-port <address> Use PORT instead of PASV
+ -Q, --quote Send command(s) to server before transfer
+EOF
+ fi
+ exit 0
+ ;;
+
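+ # The "ftps but not https" listing above is hard-coded in this demo. A
+ # sketch of how such listings could be derived instead, assuming one
+ # sorted option list per category in hypothetical files opts-ftps.txt
+ # and opts-https.txt:
+ #   comm -23 opts-ftps.txt opts-https.txt   # options only in the ftps list
+ #   comm -12 opts-ftps.txt opts-https.txt   # options common to both lists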
+
+
+ output)
+ if [[ -n "$VERBOSE" ]]; then
+ cat <<EOF
+Usage: curl [options...] <url>
+These options are used to modify the local output of the transfer:
+
+--create-dirs
+ When used in conjunction with the -o, --output option, curl will
+ create the necessary local directory hierarchy as needed. This
+ option creates the dirs mentioned with the -o, --output option,
+ nothing else. If the --output file name uses no dir or if the
+ dirs it mentions already exist, no dir will be created.
+
+ To create remote directories when using FTP or SFTP, try --ftp-
+ create-dirs.
+
+-i, --include
+ Include the HTTP response headers in the output. The HTTP re‐
+ sponse headers can include things like server name, cookies,
+ date of the document, HTTP version and more...
+
+ To view the request headers, consider the -v, --verbose option.
+
+ See also -v, --verbose.
+
+-N, --no-buffer
+ Disables the buffering of the output stream. In normal work sit‐
+ uations, curl will use a standard buffered output stream that
+ will have the effect that it will output the data in chunks, not
+ necessarily exactly when the data arrives. Using this option
+ will disable that buffering.
+
+ Note that this is the negated option name documented. You can
+ thus use --buffer to enforce the buffering.
+
+-o, --output <file>
+ Write output to <file> instead of stdout. If you are using {} or
+ [] to fetch multiple documents, you can use '#' followed by a
+ number in the <file> specifier. That variable will be replaced
+ with the current string for the URL being fetched. Like in:
+
+ curl http://{one,two}.example.com -o "file_#1.txt"
+
+ or use several variables like:
+
+ curl http://{site,host}.host[1-5].com -o "#1_#2"
+
+ You may use this option as many times as the number of URLs you
+ have. For example, if you specify two URLs on the same command
+ line, you can use it like this:
+
+ curl -o aa example.com -o bb example.net
+
+ and the order of the -o options and the URLs doesn't matter,
+ just that the first -o is for the first URL and so on, so the
+ above command line can also be written as
+
+ curl example.com example.net -o aa -o bb
+
+ See also the --create-dirs option to create the local directo‐
+ ries dynamically. Specifying the output as '-' (a single dash)
+ will force the output to be done to stdout.
+
+ See also -O, --remote-name and --remote-name-all and -J, --re‐
+ mote-header-name.
+
+-J, --remote-header-name
+ (HTTP) This option tells the -O, --remote-name option to use the
+ server-specified Content-Disposition filename instead of ex‐
+ tracting a filename from the URL.
+
+ If the server specifies a file name and a file with that name
+ already exists in the current working directory it will not be
+ overwritten and an error will occur. If the server doesn't spec‐
+ ify a file name then this option has no effect.
+
+ There's no attempt to decode %-sequences (yet) in the provided
+ file name, so this option may provide you with rather unexpected
+ file names.
+
+ WARNING: Exercise judicious use of this option, especially on
+ Windows. A rogue server could send you the name of a DLL or
+ other file that could possibly be loaded automatically by Win‐
+ dows or some third party software.
+
+--remote-name-all
+ This option changes the default action for all given URLs to be
+ dealt with as if -O, --remote-name were used for each one. So if
+ you want to disable that for a specific URL after --remote-name-
+ all has been used, you must use "-o -" or --no-remote-name.
+
+ Added in 7.19.0.
+
+-O, --remote-name
+ Write output to a local file named like the remote file we get.
+ (Only the file part of the remote file is used, the path is cut
+ off.)
+
+ The file will be saved in the current working directory. If you
+ want the file saved in a different directory, make sure you
+ change the current working directory before invoking curl with
+ this option.
+
+ The remote file name to use for saving is extracted from the
+ given URL, nothing else, and if it already exists it will be
+ overwritten. If you want the server to be able to choose the
+ file name refer to -J, --remote-header-name which can be used in
+ addition to this option. If the server chooses a file name and
+ that name already exists it will not be overwritten.
+
+ There is no URL decoding done on the file name. If it has %20 or
+ other URL encoded parts of the name, they will end up as-is as
+ file name.
+
+ You may use this option as many times as the number of URLs you
+ have.
+
+-R, --remote-time
+ When used, this will make curl attempt to figure out the time‐
+ stamp of the remote file, and if that is available make the lo‐
+ cal file get that same timestamp.
+
+--stderr
+ Redirect all writes to stderr to the specified file instead. If
+ the file name is a plain '-', it is instead written to stdout.
+
+ If this option is used several times, the last one will be used.
+
+ See also -v, --verbose and -s, --silent.
+
+--styled-output
+ Enables the automatic use of bold font styles when writing HTTP
+ headers to the terminal. Use --no-styled-output to switch them
+ off.
+
+ Added in 7.61.0.
+
+--suppress-connect-headers
+ When -p, --proxytunnel is used and a CONNECT request is made
+ don't output proxy CONNECT response headers. This option is
+ meant to be used with -D, --dump-header or -i, --include which
+ are used to show protocol headers in the output. It has no ef‐
+ fect on debug options such as -v, --verbose or --trace, or any
+ statistics.
+
+ See also -D, --dump-header and -i, --include and -p, --proxytun‐
+ nel.
+
+-B, --use-ascii
+ (FTP LDAP) Enable ASCII transfer. For FTP, this can also be en‐
+ forced by using a URL that ends with ";type=A". This option
+ causes data sent to stdout to be in text mode for win32 systems.
+
+-v, --verbose
+ Makes curl verbose during the operation. Useful for debugging
+ and seeing what's going on "under the hood". A line starting
+ with '>' means "header data" sent by curl, '<' means "header
+ data" received by curl that is hidden in normal cases, and a
+ line starting with '*' means additional info provided by curl.
+
+ If you only want HTTP headers in the output, -i, --include might
+ be the option you're looking for.
+
+ If you think this option still doesn't give you enough details,
+ consider using --trace or --trace-ascii instead.
+
+ Use -s, --silent to make curl really quiet.
+
+ See also -i, --include. This option overrides --trace and
+ --trace-ascii.
+
+-V, --version
+ Displays information about curl and the libcurl version it uses.
+
+ The first line includes the full version of curl, libcurl and
+ other 3rd party libraries linked with the executable.
+
+ The second line (starts with "Protocols:") shows all protocols
+ that libcurl reports to support.
+
+ The third line (starts with "Features:") shows specific features
+ libcurl reports to offer. Available features include:
+
+ IPv6 You can use IPv6 with this.
+
+ krb4 Krb4 for FTP is supported.
+
+ SSL SSL versions of various protocols are supported, such as
+ HTTPS, FTPS, POP3S and so on.
+
+ libz Automatic decompression of compressed files over HTTP is
+ supported.
+
+ NTLM NTLM authentication is supported.
+
+ Debug This curl uses a libcurl built with Debug. This enables
+ more error-tracking and memory debugging etc. For curl-
+ developers only!
+
+ AsynchDNS
+ This curl uses asynchronous name resolves. Asynchronous
+ name resolves can be done using either the c-ares or the
+ threaded resolver backends.
+
+ SPNEGO SPNEGO authentication is supported.
+
+ Largefile
+ This curl supports transfers of large files, files larger
+ than 2GB.
+
+ IDN This curl supports IDN - international domain names.
+
+ GSS-API
+ GSS-API is supported.
+
+ SSPI SSPI is supported.
+
+ TLS-SRP
+ SRP (Secure Remote Password) authentication is supported
+ for TLS.
+
+ HTTP2 HTTP/2 support has been built-in.
+
+ UnixSockets
+ Unix sockets support is provided.
+
+ HTTPS-proxy
+ This curl is built to support HTTPS proxy.
+
+ Metalink
+ This curl supports Metalink (both version 3 and 4 (RFC
+ 5854)), which describes mirrors and hashes. curl will
+ use mirrors for failover if there are errors (such as the
+ file or server not being available).
+
+ PSL PSL is short for Public Suffix List and means that this
+ curl has been built with knowledge about "public suf‐
+ fixes".
+
+ MultiSSL
+ This curl supports multiple TLS backends.
+
+-w, --write-out <format>
+ Make curl display information on stdout after a completed trans‐
+ fer. The format is a string that may contain plain text mixed
+ with any number of variables. The format can be specified as a
+ literal "string", or you can have curl read the format from a
+ file with "@filename" and to tell curl to read the format from
+ stdin you write "@-".
+
+ The variables present in the output format will be substituted
+ by the value or text that curl thinks fit, as described below.
+ All variables are specified as %{variable_name} and to output a
+ normal % you just write them as %%. You can output a newline by
+ using \n, a carriage return with \r and a tab space with \t.
+
+ The output will be written to standard output, but this can be
+ switched to standard error by using %{stderr}.
+
+ NOTE: The %-symbol is a special symbol in the win32-environment,
+ where all occurrences of % must be doubled when using this op‐
+ tion.
+
+ The variables available are:
+
+ content_type The Content-Type of the requested document, if
+ there was any.
+
+ filename_effective
+ The ultimate filename that curl writes out to.
+ This is only meaningful if curl is told to write
+ to a file with the -O, --remote-name or -o,
+ --output option. It's most useful in combination
+ with the -J, --remote-header-name option. (Added
+ in 7.26.0)
+
+ ftp_entry_path The initial path curl ended up in when logging on
+ to the remote FTP server. (Added in 7.15.4)
+
+ http_code The numerical response code that was found in the
+ last retrieved HTTP(S) or FTP(s) transfer. In
+ 7.18.2 the alias response_code was added to show
+ the same info.
+
+ http_connect The numerical code that was found in the last re‐
+ sponse (from a proxy) to a curl CONNECT request.
+ (Added in 7.12.4)
+
+ http_version The http version that was effectively used.
+ (Added in 7.50.0)
+
+ local_ip The IP address of the local end of the most re‐
+ cently done connection - can be either IPv4 or
+ IPv6 (Added in 7.29.0)
+
+ local_port The local port number of the most recently done
+ connection (Added in 7.29.0)
+
+ num_connects Number of new connects made in the recent trans‐
+ fer. (Added in 7.12.3)
+
+ num_redirects Number of redirects that were followed in the re‐
+ quest. (Added in 7.12.3)
+
+ proxy_ssl_verify_result
+ The result of the HTTPS proxy's SSL peer certifi‐
+ cate verification that was requested. 0 means the
+ verification was successful. (Added in 7.52.0)
+
+ redirect_url When an HTTP request was made without -L, --loca‐
+ tion to follow redirects (or when --max-redir is
+ met), this variable will show the actual URL a
+ redirect would have gone to. (Added in 7.18.2)
+
+ remote_ip The remote IP address of the most recently done
+ connection - can be either IPv4 or IPv6 (Added in
+ 7.29.0)
+
+ remote_port The remote port number of the most recently done
+ connection (Added in 7.29.0)
+
+ scheme The URL scheme (sometimes called protocol) that
+ was effectively used (Added in 7.52.0)
+
+ size_download The total amount of bytes that were downloaded.
+
+ size_header The total amount of bytes of the downloaded head‐
+ ers.
+
+ size_request The total amount of bytes that were sent in the
+ HTTP request.
+
+ size_upload The total amount of bytes that were uploaded.
+
+ speed_download The average download speed that curl measured for
+ the complete download. Bytes per second.
+
+ speed_upload The average upload speed that curl measured for
+ the complete upload. Bytes per second.
+
+ ssl_verify_result
+ The result of the SSL peer certificate verifica‐
+ tion that was requested. 0 means the verification
+ was successful. (Added in 7.19.0)
+
+ stderr From this point on, the -w, --write-out output
+ will be written to standard error. (Added in
+ 7.63.0)
+
+ stdout From this point on, the -w, --write-out output
+ will be written to standard output. This is the
+ default, but can be used to switch back after
+ switching to stderr. (Added in 7.63.0)
+
+ time_appconnect
+ The time, in seconds, it took from the start un‐
+ til the SSL/SSH/etc connect/handshake to the re‐
+ mote host was completed. (Added in 7.19.0)
+
+ time_connect The time, in seconds, it took from the start un‐
+ til the TCP connect to the remote host (or proxy)
+ was completed.
+
+ time_namelookup
+ The time, in seconds, it took from the start un‐
+ til the name resolving was completed.
+
+ time_pretransfer
+ The time, in seconds, it took from the start un‐
+ til the file transfer was just about to begin.
+ This includes all pre-transfer commands and nego‐
+ tiations that are specific to the particular pro‐
+ tocol(s) involved.
+
+ time_redirect The time, in seconds, it took for all redirection
+ steps including name lookup, connect, pretransfer
+ and transfer before the final transaction was
+ started. time_redirect shows the complete execu‐
+ tion time for multiple redirections. (Added in
+ 7.12.3)
+
+ time_starttransfer
+ The time, in seconds, it took from the start un‐
+ til the first byte was just about to be trans‐
+ ferred. This includes time_pretransfer and also
+ the time the server needed to calculate the re‐
+ sult.
+
+ time_total The total time, in seconds, that the full opera‐
+ tion lasted.
+
+ url_effective The URL that was fetched last. This is most mean‐
+ ingful if you've told curl to follow location:
+ headers.
+
+ If this option is used several times, the last one will be used.
+
+--xattr
+ When saving output to a file, this option tells curl to store
+ certain file metadata in extended file attributes. Currently,
+ the URL is stored in the xdg.origin.url attribute and, for HTTP,
+ the content type is stored in the mime_type attribute. If the
+ file system does not support extended attributes, a warning is
+ issued.
+EOF
+ else
+ cat <<EOF
+Usage: curl [options...] <url>
+These options are used to modify the local output of the transfer:
+ --create-dirs Create necessary local directory hierarchy
+ -i, --include Include protocol response headers in the output
+ -N, --no-buffer Disable buffering of the output stream
+ -o, --output <file> Write to file instead of stdout
+ -J, --remote-header-name Use the header-provided filename
+ -O, --remote-name Write output to a file named as the remote file
+ --remote-name-all Use the remote file name for all URLs
+ -R, --remote-time Set the remote file's time on the local output
+ --stderr Where to redirect stderr
+ --styled-output Enable styled output for HTTP headers
+ --suppress-connect-headers Suppress proxy CONNECT response headers
+ -B, --use-ascii Use ASCII/text transfer
+ -v, --verbose Make the operation more talkative
+ -V, --version Show version number and quit
+ -w, --write-out <format> Use output FORMAT after completion
+ --xattr Store metadata in extended file attributes
+EOF
+ fi
+ exit 0
+ ;;
+
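+ # Illustrative only: combining the output options documented above on a
+ # plain curl command line; '#1' expands to each value of the {a,b} glob
+ # and -w prints a summary line after each transfer (placeholder URL):
+ #   curl -o "file_#1.html" -w '%{http_code} %{url_effective}\n' "https://example.com/{a,b}"
+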
+
+
+ *)
+ cat <<EOF
+No help for --help-$HELP_TYPE. See --help for a help list.
+THIS IS A DEMONSTRATION PROGRAM. The only help options implemented are:
+--help
+--help-basic
+--help-all
+--help-debug
+--help-ftps
+--help-https
+--help-clientauth
+--help-output
+--help-serverauth
+
+as well as the set intersection ones (use both of these at the same time):
+--help-clientauth --help-ftps
+--help-ftps --no-help-https # i.e. ftps options that aren't also https options
+
+and these demo search queries:
+--help-search epsv
+--help-search sni
+
+as well as the verbose versions:
+--help -v
+--help-all -v
+--help-debug -v
+--help-output -v
+-v --help-search sni
+
+
+Note that some of the help options might not be entirely accurate; this is
+just intended as a quick demo.
+
+This was inspired by the talk "--help me if you can" given at curl://up 2020
+https://github.com/curl/curl-up/wiki/2020#recorded-talks
+EOF
+ exit 1
+ ;;
+ esac
+fi
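+# Not a help request after all: run the real curl instead.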
+exec curl "$@"