diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml new file mode 100644 index 00000000000000..9359dfa909dfc9 --- /dev/null +++ b/.azure-pipelines.yml @@ -0,0 +1,198 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### +# Starter pipeline +# Start with a minimal pipeline that you can customize to build and deploy your code. +# Add steps that build, run tests, deploy, and more: +# https://aka.ms/yaml + +trigger: + branches: + include: + - 'master' + - '*/ci' + +pr: + branches: + include: + - 'master' + +stages: + +########################################## +### Linux jobs first +########################################## + +- stage: linux + dependsOn: [] + jobs: + - job: ubuntu + # define defaults to make sure variables are always expanded/replaced + variables: + install: '' + configure: '' + tests: '' + timeoutInMinutes: 60 + pool: + vmImage: 'ubuntu-latest' + strategy: + matrix: + default: + name: default + configure: --enable-debug + disable_ipv6: + name: w/o IPv6 + configure: --disable-ipv6 + disable_http_smtp_imap: + name: w/o HTTP/SMTP/IMAP + configure: --disable-http --disable-smtp --disable-imap + disable_thredres: + name: sync resolver + configure: --disable-threaded-resolver + http_only: + name: HTTP only + configure: --disable-dict --disable-file --disable-ftp --disable-gopher --disable-imap --disable-ldap --disable-pop3 --disable-rtmp --disable-rtsp --disable-scp --disable-sftp --disable-smb --disable-smtp --disable-telnet --disable-tftp + torture: + name: torture + install: libnghttp2-dev + configure: --enable-debug --disable-shared --disable-threaded-resolver --enable-alt-svc + tests: -n -t --shallow=40 !FTP + steps: + - script: sudo apt-get update && sudo apt-get install -y stunnel4 python-impacket libzstd-dev libbrotli-dev $(install) + displayName: 'apt install' + + - script: ./buildconf && ./configure --enable-warnings --enable-werror $(configure) + displayName: 'configure $(name)' + + - script: make + displayName: 'make' + + - script: make test-nonflaky + displayName: 'test' + env: + AZURE_ACCESS_TOKEN: "$(System.AccessToken)" + TFLAGS: "-r $(tests)" + +########################################## +### Windows jobs below +########################################## + +- stage: windows + dependsOn: [] + variables: + agent.preferPowerShellOnContainers: true + jobs: + - job: windows + # define defaults to make sure variables are always expanded/replaced + variables: + container_img: '' + container_cmd: '' + configure: '' + tests: '' + timeoutInMinutes: 120 + pool: + vmImage: 'windows-2019' + strategy: + matrix: + msys2_mingw32_debug_openssl: + name: 32-bit OpenSSL and MQTT + container_img: mback2k/curl-docker-winbuildenv-msys2-mingw32:ltsc2019 + container_cmd: 
C:\msys64\usr\bin\sh + prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-i686-libssh2 + configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --with-libssh2 + tests: ~571 ~612 ~1056 ~1299 !SCP + msys2_mingw64_debug_openssl: + name: 64-bit OpenSSL and MQTT + container_img: mback2k/curl-docker-winbuildenv-msys2-mingw64:ltsc2019 + container_cmd: C:\msys64\usr\bin\sh + prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-x86_64-libssh2 + configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --with-libssh2 + tests: ~571 ~612 ~1056 ~1299 !SCP + msys1_mingw_debug_openssl: + name: 32-bit OpenSSL (legacy) + container_img: mback2k/curl-docker-winbuildenv-msys1-mingw:ltsc2019 + container_cmd: C:\MinGW\msys\1.0\bin\sh + configure: --host=i686-pc-mingw32 --build=i686-pc-mingw32 --prefix=/mingw --enable-debug + tests: ~203 ~1056 ~1143 + msys1_mingw32_debug_openssl: + name: 32-bit OpenSSL w/o zlib + container_img: mback2k/curl-docker-winbuildenv-msys1-mingw32:ltsc2019 + container_cmd: C:\MinGW\msys\1.0\bin\sh + configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --without-zlib + tests: ~203 ~1056 ~1143 ~1299 + msys1_mingw64_debug_openssl: + name: 64-bit OpenSSL w/o zlib + container_img: mback2k/curl-docker-winbuildenv-msys1-mingw64:ltsc2019 + container_cmd: C:\MinGW\msys\1.0\bin\sh + configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --without-zlib + tests: ~203 ~1056 ~1143 ~1299 + msys2_mingw32_debug_schannel: + name: 32-bit Schannel/SSPI/WinIDN + container_img: mback2k/curl-docker-winbuildenv-msys2-mingw32:ltsc2019 + container_cmd: C:\msys64\usr\bin\sh + prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-i686-libssh2 + configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --enable-sspi --without-ssl --with-schannel --with-winidn --with-libssh2 + tests: ~165 ~310 ~571 ~612 ~1056 ~1299 ~1448 ~2034 ~2037 ~2041 ~2046 ~2047 ~3000 ~3001 !SCP + msys2_mingw64_debug_schannel: + name: 64-bit Schannel/SSPI/WinIDN + container_img: mback2k/curl-docker-winbuildenv-msys2-mingw64:ltsc2019 + container_cmd: C:\msys64\usr\bin\sh + prepare: pacman -S --needed --noconfirm --noprogressbar libssh2-devel mingw-w64-x86_64-libssh2 + configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --enable-sspi --without-ssl --with-schannel --with-winidn --with-libssh2 + tests: ~165 ~310 ~571 ~612 ~1056 ~1299 ~1448 ~2034 ~2037 ~2041 ~2046 ~2047 ~3000 ~3001 !SCP + msys1_mingw_debug_schannel: + name: 32-bit Schannel/SSPI/WinIDN (legacy) + container_img: mback2k/curl-docker-winbuildenv-msys1-mingw:ltsc2019 + container_cmd: C:\MinGW\msys\1.0\bin\sh + configure: --host=i686-pc-mingw32 --build=i686-pc-mingw32 --prefix=/mingw --enable-debug --enable-sspi --without-ssl --with-schannel --with-winidn + tests: ~203 ~305 ~310 ~311 ~312 ~313 ~404 ~1056 ~1143 ~2034 ~2035 ~2037 ~2038 ~2041 ~2042 ~2048 ~3000 ~3001 + msys1_mingw32_debug_schannel: + name: 32-bit Schannel/SSPI/WinIDN w/o zlib + container_img: mback2k/curl-docker-winbuildenv-msys1-mingw32:ltsc2019 + container_cmd: C:\MinGW\msys\1.0\bin\sh + configure: --host=i686-w64-mingw32 --build=i686-w64-mingw32 --prefix=/mingw32 --enable-debug --enable-werror --enable-sspi 
--without-ssl --with-schannel --with-winidn --without-zlib + tests: ~203 ~310 ~1056 ~1143 ~1299 ~2034 ~2037 ~2041 ~3000 ~3001 + msys1_mingw64_debug_schannel: + name: 64-bit Schannel/SSPI/WinIDN w/o zlib + container_img: mback2k/curl-docker-winbuildenv-msys1-mingw64:ltsc2019 + container_cmd: C:\MinGW\msys\1.0\bin\sh + configure: --host=x86_64-w64-mingw32 --build=x86_64-w64-mingw32 --prefix=/mingw64 --enable-debug --enable-werror --enable-sspi --without-ssl --with-schannel --with-winidn --without-zlib + tests: ~203 ~310 ~1056 ~1143 ~1299 ~2034 ~2037 ~2041 ~3000 ~3001 + container: + image: $(container_img) + env: + MSYS2_PATH_TYPE: inherit + steps: + - script: $(container_cmd) -l -c "cd $(echo '%cd%') && $(prepare)" + displayName: 'prepare' + condition: variables.prepare + + - script: $(container_cmd) -l -c "cd $(echo '%cd%') && ./buildconf && ./configure $(configure)" + displayName: 'configure $(name)' + + - script: $(container_cmd) -l -c "cd $(echo '%cd%') && make" + displayName: 'make' + + - script: $(container_cmd) -l -c "cd $(echo '%cd%') && make test-nonflaky" + displayName: 'test' + env: + AZURE_ACCESS_TOKEN: "$(System.AccessToken)" + TFLAGS: "-r $(tests)" diff --git a/.cirrus.yml b/.cirrus.yml index 21d7b62abb0d83..823c9f5770d55e 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### # Cirrus CI configuration # https://cirrus-ci.com/github/curl/curl @@ -5,32 +26,52 @@ task: name: FreeBSD freebsd_instance: matrix: - image: freebsd-12-0-release-amd64 - image: freebsd-11-2-release-amd64 - image: freebsd-10-4-release-amd64 + # A stable 13.0 image likely won't be available before early 2021 + # image_family: freebsd-13-0-snap + image_family: freebsd-12-1 + image_family: freebsd-11-4 env: - CIRRUS_CLONE_DEPTH: 1 + CIRRUS_CLONE_DEPTH: 10 MAKE_FLAGS: -j 2 pkginstall_script: + - pkg update -f - pkg install -y autoconf automake libtool pkgconf brotli openldap-client heimdal libpsl libmetalink libssh2 openssh-portable libidn2 librtmp libnghttp2 nghttp2 stunnel + - case `python -V` in + Python?3.7*) pkg install -y py37-impacket ;; + Python?2.7*) pkg install -y py27-impacket ;; + esac - pkg delete -y curl configure_script: - ./buildconf + - case `uname -r` in + 12.1*) + export CC=clang; + export CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g"; + export CXXFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g"; + export LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer" ;; + esac - ./configure --prefix="${HOME}"/install --enable-debug --with-libssh2 --with-brotli --with-gssapi --with-libidn2 --enable-manual --enable-ldap --enable-ldaps --with-librtmp --with-libmetalink --with-libpsl --with-nghttp2 || { tail -300 config.log; false; } compile_script: - make V=1 test_script: + # blackhole? + - sysctl net.inet.tcp.blackhole + # make sure we don't run blackhole != 0 + - sudo sysctl net.inet.tcp.blackhole=0 # Some tests won't run if run as root so run them as another user. # Make directories world writable so the test step can write wherever it needs. - find . -type d -exec chmod 777 {} \; # TODO: A number of tests are failing on different FreeBSD versions and so # are disabled. This should be investigated. - SKIP_TESTS='' - - if [ `uname -r` = "12.0-RELEASE" ] ; then SKIP_TESTS='!303 !304 !323 !504 !1242 !1243 !2002 !2003'; fi - - if [ `uname -r` = "11.2-RELEASE" ] ; then SKIP_TESTS='!303 !304 !310 !311 !312 !313 !504 !1082 !1242 !1243 !2002 !2003 !2034 !2035 !2037 !2038 !2041 !2042 !2048 !3000 !3001'; fi - - if [ `uname -r` = "10.4-RELEASE" ] ; then SKIP_TESTS='!303 !304 !310 !311 !312 !313 !504 !1082 !1242 !1243 !2002 !2003 !2034 !2035 !2037 !2038 !2041 !2042 !2048 !3000 !3001'; fi + - uname -r + - case `uname -r` in + 13.0*) SKIP_TESTS='!SFTP !SCP';; + 12.1*) SKIP_TESTS='!SFTP !SCP';; + 11.*) SKIP_TESTS='!SFTP !SCP';; + esac - sudo -u nobody make V=1 TFLAGS="-n -a -p !flaky ${SKIP_TESTS}" test-nonflaky install_script: - make V=1 install diff --git a/.dcignore b/.dcignore new file mode 100644 index 00000000000000..73b1e71692a4a0 --- /dev/null +++ b/.dcignore @@ -0,0 +1,3 @@ +tests/** +docs/** +docs/examples/** diff --git a/.dir-locals.el b/.dir-locals.el index ed91b128c55e7f..7c209357db072a 100644 --- a/.dir-locals.el +++ b/.dir-locals.el @@ -1,3 +1,24 @@ +;;;*************************************************************************** +;;; _ _ ____ _ +;;; Project ___| | | | _ \| | +;;; / __| | | | |_) | | +;;; | (__| |_| | _ <| |___ +;;; \___|\___/|_| \_\_____| +;;; +;;; Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. 
+;;; +;;; This software is licensed as described in the file COPYING, which +;;; you should have received as part of this distribution. The terms +;;; are also available at https://curl.haxx.se/docs/copyright.html. +;;; +;;; You may opt to use, copy, modify, merge, publish, distribute and/or sell +;;; copies of the Software, and permit persons to whom the Software is +;;; furnished to do so, under the terms of the COPYING file. +;;; +;;; This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +;;; KIND, either express or implied. +;;; +;;;*************************************************************************** ;;; Directory Local Variables ;;; See Info node `(emacs) Directory Variables' for more information. diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 00000000000000..fb9cb5b485de5f --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +open_collective: curl diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE deleted file mode 100644 index a705e79e518d41..00000000000000 --- a/.github/ISSUE_TEMPLATE +++ /dev/null @@ -1,16 +0,0 @@ - - -### I did this - -### I expected the following - -### curl/libcurl version - -[curl -V output] - -### operating system diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000000000..65069d5cf8355e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,28 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + + + +### I did this + +### I expected the following + +### curl/libcurl version + +[curl -V output] + +### operating system + + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000000..084a84ea177ceb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: false +contact_links: + - name: Feature request + url: https://curl.haxx.se/mail/ + about: To propose new features or enhancements, please bring that discussion to a suitable curl mailing list. + - name: Question + url: https://curl.haxx.se/mail/ + about: Questions should go to the mailing list + - name: Commercial support + url: https://curl.haxx.se/support.html + about: Several companies are offering paid support for curl/libcurl diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000000000..fbdcf8a26f1893 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,55 @@ +name: CI + +on: + # Trigger the workflow on push or pull requests, but only for the + # master branch + push: + branches: + - master + - '*/ci' + pull_request: + branches: + - master + schedule: + - cron: '0 0 * * 4' + +jobs: + codeql: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + # We must fetch at least the immediate parents so that if this is + # a pull request then we can checkout the head. + fetch-depth: 2 + + # If this run was triggered by a pull request event, then checkout + # the head of the pull request instead of the merge commit. + - run: git checkout HEAD^2 + if: ${{ github.event_name == 'pull_request' }} + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: cpp + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
+ # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml new file mode 100644 index 00000000000000..f46fa6c4e536fe --- /dev/null +++ b/.github/workflows/fuzz.yml @@ -0,0 +1,36 @@ +name: CI + +on: + # Trigger the workflow on push or pull requests, but only for the + # master branch + push: + branches: + - master + - '*/ci' + pull_request: + branches: + - master + +jobs: + fuzzing: + runs-on: ubuntu-latest + steps: + - name: Build Fuzzers + uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master + with: + oss-fuzz-project-name: 'curl' + dry-run: false + + - name: Run Fuzzers + uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master + with: + oss-fuzz-project-name: 'curl' + fuzz-seconds: 2400 + dry-run: false + + - name: Upload Crash + uses: actions/upload-artifact@v1 + if: failure() + with: + name: artifacts + path: ./out/artifacts diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml new file mode 100644 index 00000000000000..5d8f376bc95e16 --- /dev/null +++ b/.github/workflows/macos.yml @@ -0,0 +1,122 @@ +name: CI + +on: + # Trigger the workflow on push or pull requests, but only for the + # master branch + push: + branches: + - master + - '*/ci' + pull_request: + branches: + - master + +jobs: + autotools: + name: macos ${{ matrix.build.name }} + runs-on: 'macos-latest' + strategy: + fail-fast: false + matrix: + build: + - name: normal + install: nghttp2 + macosx-version-min: 10.9 + - name: debug + install: nghttp2 + configure: --enable-debug --enable-mqtt + macosx-version-min: 10.9 + - name: libssh2 + install: nghttp2 libssh2 + configure: --enable-debug --with-libssh2 + macosx-version-min: 10.9 + - name: c-ares + install: nghttp2 + configure: --enable-debug --enable-ares + macosx-version-min: 10.9 + - name: HTTP only + install: nghttp2 + configure: --enable-debug --enable-maintainer-mode --disable-dict --disable-file --disable-ftp --disable-gopher --disable-imap --disable-ldap --disable-pop3 --disable-rtmp --disable-rtsp --disable-scp --disable-sftp --disable-smb --disable-smtp --disable-telnet --disable-tftp --disable-unix-sockets --disable-shared --without-brotli --without-gssapi --without-libidn2 --without-libmetalink --without-libpsl --without-librtmp --without-libssh2 --without-nghttp2 --without-ntlm-auth --without-ssl --without-zlib + macosx-version-min: 10.15 + - name: SecureTransport metalink + install: nghttp2 openssl libmetalink + configure: --enable-debug --without-ssl --with-secure-transport --with-libmetalink + macosx-version-min: 10.8 + - name: OpenSSL metalink + install: nghttp2 openssl libmetalink + configure: --enable-debug --with-ssl=/usr/local/opt/openssl --with-libmetalink + macosx-version-min: 10.9 + - name: LibreSSL metalink + install: nghttp2 libressl libmetalink + configure: --enable-debug --with-ssl=/usr/local/opt/libressl --with-libmetalink + macosx-version-min: 10.9 + - name: torture + install: nghttp2 openssl + configure: --enable-debug --disable-shared --disable-threaded-resolver 
--enable-alt-svc + tflags: -n -t --shallow=25 !FTP + macosx-version-min: 10.9 + - name: macOS 10.15 + install: nghttp2 libmetalink libssh2 + configure: --enable-debug --disable-ldap + macosx-version-min: 10.15 + steps: + - run: echo libtool autoconf automake pkg-config ${{ matrix.build.install }} | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile + name: 'brew bundle' + + - run: brew update && brew bundle install --no-lock --file /tmp/Brewfile + name: 'brew install' + + - uses: actions/checkout@v2 + + - run: ./buildconf && ./configure --enable-warnings --enable-werror ${{ matrix.build.configure }} + name: 'configure' + env: + # -Wvla is caused by brotli + CFLAGS: "-Wno-vla -mmacosx-version-min=${{ matrix.build.macosx-version-min }}" + + - run: make + name: 'make' + + - run: make test-nonflaky + name: 'test' + env: + TFLAGS: "${{ matrix.build.tflags }} ~1452" + + cmake: + name: macos cmake ${{ matrix.compiler.CC }} ${{ matrix.build.name }} + runs-on: 'macos-latest' + env: ${{ matrix.compiler }} + strategy: + fail-fast: false + matrix: + compiler: + - CC: clang + CXX: clang++ + CFLAGS: "-mmacosx-version-min=10.15" + - CC: gcc-8 + CXX: g++-8 + CFLAGS: "-mmacosx-version-min=10.15 -Wno-error=undef -Wno-error=conversion" + - CC: gcc-9 + CXX: g++-9 + CFLAGS: "-mmacosx-version-min=10.15 -Wno-error=undef -Wno-error=conversion" + build: + - name: OpenSSL + install: nghttp2 openssl + generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON + - name: LibreSSL + install: nghttp2 libressl + generate: -DOPENSSL_ROOT_DIR=/usr/local/opt/libressl -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON + steps: + - run: echo libtool autoconf automake pkg-config ${{ matrix.build.install }} | xargs -Ix -n1 echo brew '"x"' > /tmp/Brewfile + name: 'brew bundle' + + - run: brew update && brew bundle install --no-lock --file /tmp/Brewfile + name: 'brew install' + + - uses: actions/checkout@v2 + + - run: cmake -H. -Bbuild -DCURL_WERROR=ON -DPICKY_COMPILER=ON ${{ matrix.build.generate }} + name: 'cmake generate' + + - run: cmake --build build + name: 'cmake build' diff --git a/.gitignore b/.gitignore index 9b040fea07449a..e35747146de00c 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ /.vs /build/ /builds/ +/stats/ __pycache__ CHANGES.dist Debug @@ -57,4 +58,3 @@ scripts/curl.fish curl_fuzzer curl_fuzzer_seed_corpus.zip libstandaloneengine.a -.checksrc diff --git a/.lgtm.yml b/.lgtm.yml index bb6945f0fc71c5..4063cd3a78552a 100644 --- a/.lgtm.yml +++ b/.lgtm.yml @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### extraction: cpp: prepare: @@ -7,4 +28,4 @@ extraction: - rm -f CMakeLists.txt - ./buildconf configure: # enable as many optional features as possible - command: ./configure --enable-ares --with-libssh2 --with-gssapi --with-librtmp --with-libmetalink --with-libmetalink + command: ./configure --enable-ares --with-libssh2 --with-gssapi --with-librtmp --with-libmetalink diff --git a/.mailmap b/.mailmap index cc86d0032aaea9..b5e6a6538b9c7e 100644 --- a/.mailmap +++ b/.mailmap @@ -46,3 +46,29 @@ Nick Zitzmann Kees Dekker Max Savenkov Daniel Jelinski <30433125+djelinski@users.noreply.github.com> +Amit Katyal +Giorgos Oikonomou +Evgeny Grin +Peter Pih +Anton Malov +Marquis de Muesli +Kyohei Kadota +Lucas Pardue +Massimiliano Fantuzzi +Niall O'Reilly +Mohammad Hasbini +Andrew Ishchuk +Nicolas Guillier <59726521+nicoguillier@users.noreply.github.com> +Julian Z +Jessa Chandler +Gökhan Şengün +Svyatoslav Mishyn +Douglas Steinwand +James Fuller +Don J Olmstead +Nicolas Sterchele +Sergey Raevskiy +SecuritySense on github +Mipsters on github +Pavel Novikov +apique13 on github diff --git a/.muse/config.toml b/.muse/config.toml new file mode 100644 index 00000000000000..4e5b5cc63bdea7 --- /dev/null +++ b/.muse/config.toml @@ -0,0 +1,3 @@ +ignore = [ "DEAD_STORE" ] +build = "make" +setup = ".muse/setup.sh" diff --git a/.muse/setup.sh b/.muse/setup.sh new file mode 100755 index 00000000000000..55872d594b9955 --- /dev/null +++ b/.muse/setup.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +./buildconf +./configure +echo "Ran the setup script for muse including autoconf and executing ./configure" diff --git a/.travis-iconv-env.sh b/.travis-iconv-env.sh deleted file mode 100644 index bb7dcf422f5021..00000000000000 --- a/.travis-iconv-env.sh +++ /dev/null @@ -1 +0,0 @@ -export CPPFLAGS="-DCURL_DOES_CONVERSIONS -DHAVE_ICONV -DCURL_ICONV_CODESET_OF_HOST='\"ISO8859-1\"'" diff --git a/.travis.yml b/.travis.yml index ed18f33846db6c..2832a9cac4596b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,558 +1,422 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### language: c -sudo: required +os: linux +dist: bionic cache: - directories: - - $HOME/libpsl-0.20.1 - - $HOME/mbedtls-mbedtls-2.8.0 - - $HOME/libidn2-2.0.4 - - $HOME/wolfssl-4.0.0-stable - - $HOME/nghttp2-1.34.0 + directories: + - $HOME/wolfssl-4.4.0-stable + - $HOME/mesalink-1.0.0 + - $HOME/nghttp2-1.39.2 env: - global: - - LD_LIBRARY_PATH=/usr/local/lib + global: + - LD_LIBRARY_PATH=/usr/local/lib addons: - apt: - config: - retries: true - sources: &common_sources - - ubuntu-toolchain-r-test - packages: &common_packages - - cmake - - gcc-8 - - valgrind - - libev-dev - - libc-ares-dev - - g++-8 - - libstdc++-8-dev - - stunnel4 - - libidn2-0-dev - - autopoint # for libpsl that needs autoreconf that uses gettext that needs it - - libunistring-dev # for libidn2 needed by libpsl - - gnutls-bin + apt: &common_apt + config: + retries: true + packages: &common_packages + - cmake + - valgrind + - libev-dev + - libc-ares-dev + - g++-8 + - stunnel4 + - libidn2-dev + - gnutls-bin + - python-impacket -matrix: - include: - - os: linux - compiler: gcc - dist: trusty - env: - - T=normal C="--with-gssapi --with-libssh2" CHECKSRC=1 - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - addons: - apt: - sources: - - *common_sources - packages: - - *common_packages - - krb5-user - - libssh2-1-dev - - os: linux - compiler: gcc - dist: trusty - env: - - T=normal C=--with-libssh - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - addons: - apt: - sources: - - *common_sources - packages: - - *common_packages - - libssh-dev - - os: linux - compiler: gcc - dist: trusty - env: - - T=normal C="--disable-http --disable-smtp --disable-imap" - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: gcc - dist: trusty - env: - - T=normal C="--enable-ares" - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: gcc - dist: trusty - env: - - T=normal C="--disable-verbose" CPPFLAGS="-Wno-variadic-macros" NOTESTS=1 - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: gcc - dist: trusty - env: - - T=normal BROTLI=yes - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: gcc - dist: trusty - env: - - T=novalgrind BORINGSSL=yes C="--with-ssl=$HOME/boringssl" LD_LIBRARY_PATH=/home/travis/boringssl/lib:/usr/local/lib - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: gcc - dist: trusty - env: - - T=debug-wolfssl C="--with-wolfssl --without-ssl" - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: clang - dist: trusty - env: - - T=debug - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=debug C="--enable-alt-svc" - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=debug C="--with-mbedtls --without-ssl" - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=debug C="--with-gnutls --without-ssl" - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - 
apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - libgnutls28-dev - - os: linux - compiler: clang - dist: trusty - env: - - T=debug C="--disable-threaded-resolver" - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=debug C="--with-nss --without-ssl" NOTESTS=1 CPPFLAGS="-isystem /usr/include/nss" - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - libnss3-dev - - os: linux - compiler: gcc - dist: trusty - env: - - T=iconv - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: osx - compiler: gcc - env: T=debug C=--with-libssh2 - - os: osx - compiler: gcc - env: T=debug C=--enable-ares - - os: osx - compiler: gcc - env: T=debug C="--with-ssl=/usr/local/opt/openssl --with-libmetalink" - - os: osx - compiler: gcc - env: T=debug C="--with-ssl=/usr/local/opt/libressl --with-libmetalink" - - os: osx - compiler: clang - osx_image: xcode10 - env: T=debug C="--without-ssl --with-darwinssl --with-libmetalink" - - os: osx - compiler: clang - env: T=normal - - os: osx - compiler: clang - env: T=cmake - - os: linux - compiler: gcc - dist: trusty - env: - - T=cmake - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: clang - dist: trusty - env: - - T=cmake - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: gcc - dist: trusty - env: - - T=coverage - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - addons: - apt: - sources: - - *common_sources - packages: - - *common_packages - - lcov - - os: linux - compiler: gcc - dist: trusty - env: - - T=distcheck - - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" - - os: linux - compiler: clang - dist: trusty - env: - - T=fuzzer - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=tidy - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - clang-tidy-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=scan-build - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 - - os: linux - compiler: clang - dist: trusty - env: - - T=debug CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g" LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer" LIBS="-ldl -lubsan" - - OVERRIDE_CC="CC=clang-7" OVERRIDE_CXX="CXX=clang++-7" - addons: - apt: - sources: - - *common_sources - - llvm-toolchain-trusty-7 - packages: - - *common_packages - - clang-7 +jobs: + include: + - env: + - T=normal C="--with-gssapi --with-libssh2" CHECKSRC=1 + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - 
krb5-user + - libssh2-1-dev + - env: + - T=normal C=--with-libssh + # Avoid bionic, its pre-release libssh version triggers deprecation warnings. + dist: focal + addons: + apt: + <<: *common_apt + packages: + - cmake + - valgrind + - libev-dev + - libc-ares-dev + - g++-8 + - stunnel4 + - libidn2-dev + - gnutls-bin + # The above list is common_packages minus impacket. + - libssh-dev + - env: + - T=normal C="--enable-ares" + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + - env: + - T=normal C="--disable-proxy" + - env: + - T=normal C="--disable-verbose" CPPFLAGS="-Wno-variadic-macros" NOTESTS=1 + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=novalgrind BORINGSSL=yes C="--with-ssl=$HOME/boringssl" LD_LIBRARY_PATH=/home/travis/boringssl/lib:/usr/local/lib + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + before_install: + - eval "$(gimme stable)"; gimme --list # Install latest Go (for boringssl) + - env: + - T=novalgrind QUICHE="yes" C="--with-ssl=$HOME/quiche/deps/boringssl/src --with-quiche=$HOME/quiche/target/release --enable-alt-svc" LD_LIBRARY_PATH=$HOME/quiche/target/release:/usr/local/lib + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=novalgrind LIBRESSL=yes C="--with-ssl=$HOME/libressl" LD_LIBRARY_PATH=/home/travis/libressl/lib:/usr/local/lib + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + - env: + - T=novalgrind NGTCP2=yes C="--with-ssl=$HOME/ngbuild --with-ngtcp2=$HOME/ngbuild --with-nghttp3=$HOME/ngbuild --enable-alt-svc" NOTESTS= + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=novalgrind NGTCP2=yes GNUTLS=yes C="PKG_CONFIG_PATH=$HOME/ngbuild --without-ssl --with-gnutls=$HOME/ngbuild --with-ngtcp2=$HOME/ngbuild --with-nghttp3=$HOME/ngbuild --enable-alt-svc" NOTESTS= + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - autogen + - automake + - autopoint + - bison + - gperf + - libgmp-dev + - libopts25-dev + - libp11-kit-dev + - libtasn1-6-dev + - nettle-dev + - env: + - T=debug-wolfssl C="--with-wolfssl --without-ssl" + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug OPENSSL3="yes" C="--with-ssl=$HOME/openssl3" LD_LIBRARY_PATH=/home/travis/openssl3/lib:/usr/local/lib + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug-mesalink C="--with-mesalink --without-ssl" MESALINK=yes + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug + - &clang OVERRIDE_CC="CC=clang-9" OVERRIDE_CXX="CXX=clang++-9" + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - &clang_packages [*common_packages, clang-9] + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug C="--enable-alt-svc" + - *clang + compiler: clang + addons: + apt: + <<: 
*common_apt + packages: + - *clang_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug C="--with-mbedtls --without-ssl" + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - libmbedtls-dev + - env: + - T=debug C="--with-gnutls --without-ssl" + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libgnutls28-dev + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug C="--with-nss --without-ssl" NOTESTS=1 CPPFLAGS="-isystem /usr/include/nss" + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libnss3-dev + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=iconv + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + - env: + - T=cmake BORINGSSL=yes QUICHE=yes C="-DUSE_QUICHE=1 -DOPENSSL_ROOT_DIR=$HOME/boringssl -DCURL_BROTLI=1 -DCURL_ZSTD=1" + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + - PKG_CONFIG_PATH="$HOME/quiche/target/release" + before_install: + - eval "$(gimme stable)"; gimme --list # Install latest Go (for boringssl) + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=cmake NGTCP2=yes C="-DUSE_NGTCP2=ON -DCURL_BROTLI=1 -DCURL_ZSTD=1" + - *clang + - PKG_CONFIG_PATH="$HOME/ngbuild/lib/pkgconfig" + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=torture + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - lcov + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - libssh2-1-dev + - env: + - T=distcheck + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=fuzzer + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=tidy + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - clang-tidy-9 + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=scan-build + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug CFLAGS="-fsanitize=address,undefined,signed-integer-overflow -fno-sanitize-recover=undefined,integer -Wformat -Werror=format-security -Werror=array-bounds -g" LDFLAGS="-fsanitize=address,undefined -fno-sanitize-recover=undefined,integer" LIBS="-ldl -lubsan" + - *clang + compiler: clang + addons: + apt: + <<: *common_apt + packages: + - *clang_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - env: + - T=debug C="--enable-alt-svc" + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + arch: arm64 + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - libev-dev + - libssl-dev + - libtool + - pkg-config + - zlib1g-dev + + - env: + - T=debug C="--enable-alt-svc" + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + arch: ppc64le + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - libev-dev + - libssl-dev + - libtool + - pkg-config + - zlib1g-dev + + - env: + - 
T=debug C="--enable-alt-svc" + - OVERRIDE_CC="CC=gcc-8" OVERRIDE_CXX="CXX=g++-8" + arch: s390x + addons: + apt: + <<: *common_apt + packages: + - *common_packages + - libpsl-dev + - libbrotli-dev + - libzstd-dev + - libev-dev + - libssl-dev + - libtool + - pkg-config + - zlib1g-dev before_install: - - eval "${OVERRIDE_CC}" - - eval "${OVERRIDE_CXX}" +- export "${OVERRIDE_CC-blank=}" +- export "${OVERRIDE_CXX-blank=}" install: - - if [ "$T" = "coverage" ]; then pip2 install --user cpp-coveralls; fi - - if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew update > /dev/null; fi - - if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew reinstall libtool > /dev/null; fi - - if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew install rtmpdump libssh2 c-ares libmetalink libressl nghttp2 libmetalink; fi +- if [ "$T" = "coverage" ]; then pip2 install --user cpp-coveralls; fi +# before_script and script: +# Travis isn't reliable catching errors in inline script commands (#3730). +# Do not add anything here, instead add to the respective script. before_script: - - ./buildconf - - | - # No brotli package available for Trusty. Download & compile from source. - # Cannot be done in the install script because cmake is needed. - if [ "$TRAVIS_OS_NAME" = linux -a "$BROTLI" ]; then - curl -L https://github.com/google/brotli/archive/v1.0.1.tar.gz | - tar xzf - && - ( - cd brotli-1.0.1 && - cmake . -DCMAKE_INSTALL_PREFIX=/usr \ - -DCMAKE_INSTALL_LIBDIR=/usr/lib && - make && - sudo make install - ) - fi - - | - if [ "$TRAVIS_OS_NAME" = linux -a "$BORINGSSL" ]; then - (cd $HOME && - git clone --depth=1 https://boringssl.googlesource.com/boringssl && - cd boringssl && - mkdir build && - cd build && - CXX="g++" CC="gcc" cmake -DCMAKE_BUILD_TYPE=release -DBUILD_SHARED_LIBS=1 .. && - make && - cd .. && - mkdir lib && - cd lib && - ln -s ../build/crypto/libcrypto.so . && - ln -s ../build/ssl/libssl.so . && - echo "BoringSSL lib dir: "`pwd` && - export LIBS=-lpthread ) - fi - - | - if [ $TRAVIS_OS_NAME = linux ]; then - if [ ! -e $HOME/libidn2-2.0.4/Makefile ]; then - (cd $HOME && \ - curl -LO https://ftp.gnu.org/gnu/libidn/libidn2-2.0.4.tar.gz && \ - tar -xzf libidn2-2.0.4.tar.gz && \ - cd libidn2-2.0.4 && \ - ./configure && \ - make) - fi - fi - - | - if [ $TRAVIS_OS_NAME = linux ]; then - if [ ! -e $HOME/libpsl-0.20.1/Makefile ]; then - (cd $HOME && \ - curl -LO https://github.com/rockdaboot/libpsl/releases/download/libpsl-0.20.1/libpsl-0.20.1.tar.gz && \ - tar -xzf libpsl-0.20.1.tar.gz && \ - cd libpsl-0.20.1 && \ - autoreconf -i && \ - ./configure && \ - make) - fi - fi - - | - if [ $TRAVIS_OS_NAME = linux ]; then - if [ ! -e $HOME/mbedtls-mbedtls-2.8.0/library/libmbedtls.a ]; then - (cd $HOME && \ - curl -LO https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.8.0.tar.gz && \ - tar -xzf mbedtls-2.8.0.tar.gz && \ - cd mbedtls-mbedtls-2.8.0 && \ - cmake . -DCMAKE_INSTALL_PREFIX=/usr/local -DCMAKE_C_FLAGS=-fPIC && \ - make) - fi - fi - - | - if [ $TRAVIS_OS_NAME = linux ]; then - if [ ! -e $HOME/wolfssl-4.0.0-stable/Makefile ]; then - (cd $HOME && \ - curl -LO https://github.com/wolfSSL/wolfssl/archive/v4.0.0-stable.tar.gz && \ - tar -xzf v4.0.0-stable.tar.gz && \ - cd wolfssl-4.0.0-stable && \ - ./autogen.sh && \ - ./configure --enable-tls13 --enable-all && \ - touch wolfssl/wolfcrypt/fips.h && \ - make) - fi - fi - - | - if [ $TRAVIS_OS_NAME = linux ]; then - if [ ! 
-e $HOME/nghttp2-1.34.0/Makefile ]; then - (cd $HOME && \ - curl -L https://github.com/nghttp2/nghttp2/releases/download/v1.34.0/nghttp2-1.34.0.tar.gz | - tar xzf - && \ - cd nghttp2-1.34.0 && \ - CXX="g++-8" CC="gcc-8" CFLAGS="" LDFLAGS="" LIBS="" ./configure --disable-threads --enable-app && \ - make) - fi - fi - - | - if [ $TRAVIS_OS_NAME = linux ]; then - (cd $HOME/libidn2-2.0.4 && sudo make install) - (cd $HOME/libpsl-0.20.1 && sudo make install) - (cd $HOME/mbedtls-mbedtls-2.8.0 && sudo make install) - (cd $HOME/wolfssl-4.0.0-stable && sudo make install) - (cd $HOME/nghttp2-1.34.0 && sudo make install) - fi - +- ./scripts/travis/before_script.sh || travis_terminate 1 script: - - | - set -eo pipefail - if [ "$T" = "coverage" ]; then - ./configure --enable-debug --disable-shared --enable-code-coverage - make - make TFLAGS=-n test-nonflaky - make "TFLAGS=-n -e" test-nonflaky - tests="1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 200 201 202 300 301 302 500 501 502 503 504 506 507 508 509 510 511 512 513 514 515 516 517 518 519 600 601 700 701 702 800 801 802 803 900 901 902 903 1000 1001 1002 1004 1100 1101 1200 1201 1302 1303 1304 1305 1306 1308 1400 1401 1402 1404 1450 1451 1452 1502 1507 1508 1600 1602 1603 1605 1650 1651 1652 1653 1654 2001 2100 3000" - make "TFLAGS=-n -t $tests" test-nonflaky - coveralls --gcov /usr/bin/gcov-8 --gcov-options '\-lp' -i src -e lib -e tests -e docs -b $PWD/src - coveralls --gcov /usr/bin/gcov-8 --gcov-options '\-lp' -e src -i lib -e tests -e docs -b $PWD/lib - fi - - | - set -eo pipefail - if [ "$T" = "debug" ]; then - ./configure --enable-debug --enable-werror $C - make && make examples - if [ -z $NOTESTS ]; then - make TFLAGS=-n test-nonflaky - fi - fi - - | - set -eo pipefail - if [ "$T" = "debug-wolfssl" ]; then - ./configure --enable-debug --enable-werror $C - make - make "TFLAGS=-n !313" test-nonflaky - fi - - | - set -eo pipefail - if [ "$T" = "novalgrind" ]; then - ./configure $C - make && make examples - make TFLAGS=-n test-nonflaky - fi - - | - set -eo pipefail - if [ "$T" = "normal" ]; then - if [ $TRAVIS_OS_NAME = linux ]; then - # Remove system curl to make sure we don't rely on it. - # Only done on Linux since we're not permitted to on mac. - sudo rm -f /usr/bin/curl - fi - ./configure --enable-warnings --enable-werror $C - make && make examples - if [ -z $NOTESTS ]; then - make test-nonflaky - fi - if [ -n $CHECKSRC ]; then - make checksrc - fi - fi - - | - set -eo pipefail - if [ "$T" = "tidy" ]; then - ./configure --enable-warnings --enable-werror $C - make && make tidy - fi - - | - set -eo pipefail - if [ "$T" = "iconv" ]; then - source .travis-iconv-env.sh - ./configure --enable-debug --enable-werror $C - make && make examples - make test-nonflaky - fi - - | - set -eo pipefail - if [ "$T" = "cmake" ]; then - if [ $TRAVIS_OS_NAME = linux ]; then - cmake -H. -Bbuild -DCURL_WERROR=ON && cmake --build build - else - cmake -H. -Bbuild -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCURL_DISABLE_LDAP=ON -DCURL_DISABLE_LDAPS=ON && cmake --build build - fi - fi - - | - set -eo pipefail - if [ "$T" = "distcheck" ]; then - # find BOM markers and exit if we do - ! 
git grep `printf '\xef\xbb\xbf'` - ./configure - make - ./maketgz 99.98.97 - # verify in-tree build - and install it - (tar xf curl-99.98.97.tar.gz && \ - cd curl-99.98.97 && \ - ./configure --prefix=$HOME/temp && \ - make && \ - make TFLAGS=1 test && \ - make install) - # basic check of the installed files - bash scripts/installcheck.sh $HOME/temp - rm -rf curl-99.98.97 - # verify out-of-tree build - (tar xf curl-99.98.97.tar.gz && \ - touch curl-99.98.97/docs/{cmdline-opts,libcurl}/Makefile.inc && \ - mkdir build && \ - cd build && \ - ../curl-99.98.97/configure && \ - make && \ - make TFLAGS='-p 1 1139' test) - # verify cmake build - rm -rf curl-99.98.97 - (tar xf curl-99.98.97.tar.gz && \ - cd curl-99.98.97 && \ - mkdir build && \ - cd build && \ - cmake .. && \ - make) - fi - - | - set -eo pipefail - if [ "$T" = "fuzzer" ]; then - # Download the fuzzer to a temporary folder - ./tests/fuzz/download_fuzzer.sh /tmp/curl_fuzzer - - export CURLSRC=$PWD - - # Run the mainline fuzzer test - pushd /tmp/curl_fuzzer - ./mainline.sh ${CURLSRC} - popd - fi - - | - if [ "$T" = "scan-build" ]; then - scan-build ./configure --enable-debug --enable-werror $C - scan-build --status-bugs make && scan-build --status-bugs make examples - fi +- ./scripts/travis/script.sh || travis_terminate 1 -# whitelist branches to avoid testing feature branches twice (as branch and as pull request) +# select branches to avoid testing feature branches twice (as branch and as pull request) branches: - only: - - master - - /\/ci$/ + only: + - master + - /\/ci$/ notifications: email: false diff --git a/CMake/CMakeConfigurableFile.in b/CMake/CMakeConfigurableFile.in index df2c382e9b352d..2bafe2c1035cc9 100644 --- a/CMake/CMakeConfigurableFile.in +++ b/CMake/CMakeConfigurableFile.in @@ -1 +1,22 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### @CMAKE_CONFIGURABLE_FILE_CONTENT@ diff --git a/CMake/CurlSymbolHiding.cmake b/CMake/CurlSymbolHiding.cmake index 15ba46e4672727..aaac9feadfdbb8 100644 --- a/CMake/CurlSymbolHiding.cmake +++ b/CMake/CurlSymbolHiding.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. 
+# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### include(CheckCSourceCompiles) option(CURL_HIDDEN_SYMBOLS "Set to ON to hide libcurl internal symbols (=hide all symbols that aren't officially external)." ON) @@ -11,13 +32,7 @@ if(CURL_HIDDEN_SYMBOLS) set(_SYMBOL_EXTERN "__attribute__ ((__visibility__ (\"default\")))") set(_CFLAG_SYMBOLS_HIDE "-fvisibility=hidden") elseif(CMAKE_COMPILER_IS_GNUCC) - if(NOT CMAKE_VERSION VERSION_LESS 2.8.10) - set(GCC_VERSION ${CMAKE_C_COMPILER_VERSION}) - else() - execute_process(COMMAND ${CMAKE_C_COMPILER} -dumpversion - OUTPUT_VARIABLE GCC_VERSION) - endif() - if(NOT GCC_VERSION VERSION_LESS 3.4) + if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 3.4) # note: this is considered buggy prior to 4.0 but the autotools don't care, so let's ignore that fact set(SUPPORTS_SYMBOL_HIDING TRUE) set(_SYMBOL_EXTERN "__attribute__ ((__visibility__ (\"default\")))") @@ -29,7 +44,7 @@ if(CURL_HIDDEN_SYMBOLS) set(_CFLAG_SYMBOLS_HIDE "-xldscope=hidden") elseif(CMAKE_C_COMPILER_ID MATCHES "Intel" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 9.0) # note: this should probably just check for version 9.1.045 but I'm not 100% sure - # so let's to it the same way autotools do. + # so let's do it the same way autotools do. set(SUPPORTS_SYMBOL_HIDING TRUE) set(_SYMBOL_EXTERN "__attribute__ ((__visibility__ (\"default\")))") set(_CFLAG_SYMBOLS_HIDE "-fvisibility=hidden") @@ -53,7 +68,7 @@ elseif(MSVC) message(WARNING "Hiding private symbols regardless CURL_HIDDEN_SYMBOLS being disabled.") set(HIDES_CURL_PRIVATE_SYMBOLS TRUE) endif() -elseif() +else() set(HIDES_CURL_PRIVATE_SYMBOLS FALSE) endif() diff --git a/CMake/CurlTests.c b/CMake/CurlTests.c index 848e0d5d79f606..3ef35f025356e5 100644 --- a/CMake/CurlTests.c +++ b/CMake/CurlTests.c @@ -5,7 +5,7 @@ * | (__| |_| | _ <| |___ * \___|\___/|_| \_\_____| * - * Copyright (C) 1998 - 2014, Daniel Stenberg, , et al. + * Copyright (C) 1998 - 2019, Daniel Stenberg, , et al. * * This software is licensed as described in the file COPYING, which * you should have received as part of this distribution. 
The terms @@ -125,6 +125,7 @@ int main(void) #if defined(HAVE_GETHOSTBYADDR_R_5) || \ defined(HAVE_GETHOSTBYADDR_R_5_REENTRANT) rc = gethostbyaddr_r(address, length, type, &h, &hdata); + (void)rc; #elif defined(HAVE_GETHOSTBYADDR_R_7) || \ defined(HAVE_GETHOSTBYADDR_R_7_REENTRANT) hp = gethostbyaddr_r(address, length, type, &h, buffer, 8192, &h_errnop); @@ -132,6 +133,7 @@ int main(void) #elif defined(HAVE_GETHOSTBYADDR_R_8) || \ defined(HAVE_GETHOSTBYADDR_R_8_REENTRANT) rc = gethostbyaddr_r(address, length, type, &h, buffer, 8192, &hp, &h_errnop); + (void)rc; #endif #if defined(HAVE_GETHOSTBYNAME_R_3) || \ @@ -584,8 +586,8 @@ int fun2(int arg1, int arg2) { int main() { int res3 = c99_vmacro3(1, 2, 3); - (void)res3; int res2 = c99_vmacro2(1, 2); + (void)res3; (void)res2; return 0; } @@ -608,6 +610,8 @@ int main() { int res3 = gcc_vmacro3(1, 2, 3); int res2 = gcc_vmacro2(1, 2); + (void)res3; + (void)res2; return 0; } #endif diff --git a/CMake/FindBearSSL.cmake b/CMake/FindBearSSL.cmake new file mode 100644 index 00000000000000..a8f72c92d285a0 --- /dev/null +++ b/CMake/FindBearSSL.cmake @@ -0,0 +1,30 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### +find_path(BEARSSL_INCLUDE_DIRS bearssl.h) + +find_library(BEARSSL_LIBRARY bearssl) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(BEARSSL DEFAULT_MSG + BEARSSL_INCLUDE_DIRS BEARSSL_LIBRARY) + +mark_as_advanced(BEARSSL_INCLUDE_DIRS BEARSSL_LIBRARY) diff --git a/CMake/FindBrotli.cmake b/CMake/FindBrotli.cmake index 351b8f757b8bb2..c43172b640787b 100644 --- a/CMake/FindBrotli.cmake +++ b/CMake/FindBrotli.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### include(FindPackageHandleStandardArgs) find_path(BROTLI_INCLUDE_DIR "brotli/decode.h") diff --git a/CMake/FindCARES.cmake b/CMake/FindCARES.cmake index 723044a644252d..9160ae5fd66879 100644 --- a/CMake/FindCARES.cmake +++ b/CMake/FindCARES.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### # - Find c-ares # Find the c-ares includes and library # This module defines @@ -7,34 +28,16 @@ # also defined, but not for general use are # CARES_LIBRARY, where to find the c-ares library. -find_path(CARES_INCLUDE_DIR ares.h - /usr/local/include - /usr/include - ) +find_path(CARES_INCLUDE_DIR ares.h) set(CARES_NAMES ${CARES_NAMES} cares) find_library(CARES_LIBRARY NAMES ${CARES_NAMES} - PATHS /usr/lib /usr/local/lib ) -if(CARES_LIBRARY AND CARES_INCLUDE_DIR) - set(CARES_LIBRARIES ${CARES_LIBRARY}) - set(CARES_FOUND "YES") -else() - set(CARES_FOUND "NO") -endif() - - -if(CARES_FOUND) - if(NOT CARES_FIND_QUIETLY) - message(STATUS "Found c-ares: ${CARES_LIBRARIES}") - endif() -else() - if(CARES_FIND_REQUIRED) - message(FATAL_ERROR "Could not find c-ares library") - endif() -endif() +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(CARES + REQUIRED_VARS CARES_LIBRARY CARES_INCLUDE_DIR) mark_as_advanced( CARES_LIBRARY diff --git a/CMake/FindGSS.cmake b/CMake/FindGSS.cmake index 8a28f2fb617817..02111a20fe03c8 100644 --- a/CMake/FindGSS.cmake +++ b/CMake/FindGSS.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### # - Try to find the GSS Kerberos library # Once done this will define # @@ -62,6 +83,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. 
Let's take more traditional approac COMMAND ${_GSS_CONFIGURE_SCRIPT} "--cflags" "gssapi" OUTPUT_VARIABLE _GSS_CFLAGS RESULT_VARIABLE _GSS_CONFIGURE_FAILED + OUTPUT_STRIP_TRAILING_WHITESPACE ) message(STATUS "CFLAGS: ${_GSS_CFLAGS}") if(NOT _GSS_CONFIGURE_FAILED) # 0 means success @@ -84,6 +106,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac COMMAND ${_GSS_CONFIGURE_SCRIPT} "--libs" "gssapi" OUTPUT_VARIABLE _GSS_LIB_FLAGS RESULT_VARIABLE _GSS_CONFIGURE_FAILED + OUTPUT_STRIP_TRAILING_WHITESPACE ) message(STATUS "LDFLAGS: ${_GSS_LIB_FLAGS}") @@ -110,6 +133,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac COMMAND ${_GSS_CONFIGURE_SCRIPT} "--version" OUTPUT_VARIABLE _GSS_VERSION RESULT_VARIABLE _GSS_CONFIGURE_FAILED + OUTPUT_STRIP_TRAILING_WHITESPACE ) # older versions may not have the "--version" parameter. In this case we just don't care. @@ -121,6 +145,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac COMMAND ${_GSS_CONFIGURE_SCRIPT} "--vendor" OUTPUT_VARIABLE _GSS_VENDOR RESULT_VARIABLE _GSS_CONFIGURE_FAILED + OUTPUT_STRIP_TRAILING_WHITESPACE ) # older versions may not have the "--vendor" parameter. In this case we just don't care. @@ -134,7 +159,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac endif() endif() - else() # either there is no config script or we are on platform that doesn't provide one (Windows?) + else() # either there is no config script or we are on a platform that doesn't provide one (Windows?) find_path(_GSS_INCLUDE_DIR NAMES @@ -164,7 +189,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac set(CMAKE_REQUIRED_DEFINITIONS "") endif() else() - # I'm not convienced if this is the right way but this is what autotools do at the moment + # I'm not convinced if this is the right way but this is what autotools do at the moment find_path(_GSS_INCLUDE_DIR NAMES "gssapi.h" diff --git a/CMake/FindLibSSH2.cmake b/CMake/FindLibSSH2.cmake index 84822dba74d7be..4cdf3e31032873 100644 --- a/CMake/FindLibSSH2.cmake +++ b/CMake/FindLibSSH2.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### # - Try to find the libssh2 library # Once done this will define # @@ -5,31 +26,18 @@ # LIBSSH2_INCLUDE_DIR - the libssh2 include directory # LIBSSH2_LIBRARY - the libssh2 library name -if(LIBSSH2_INCLUDE_DIR AND LIBSSH2_LIBRARY) - set(LibSSH2_FIND_QUIETLY TRUE) -endif() - -find_path(LIBSSH2_INCLUDE_DIR libssh2.h -) +find_path(LIBSSH2_INCLUDE_DIR libssh2.h) -find_library(LIBSSH2_LIBRARY NAMES ssh2 -) +find_library(LIBSSH2_LIBRARY NAMES ssh2 libssh2) if(LIBSSH2_INCLUDE_DIR) - file(STRINGS "${LIBSSH2_INCLUDE_DIR}/libssh2.h" libssh2_version_str REGEX "^#define[\t ]+LIBSSH2_VERSION_NUM[\t ]+0x[0-9][0-9][0-9][0-9][0-9][0-9].*") - - string(REGEX REPLACE "^.*LIBSSH2_VERSION_NUM[\t ]+0x([0-9][0-9]).*$" "\\1" LIBSSH2_VERSION_MAJOR "${libssh2_version_str}") - string(REGEX REPLACE "^.*LIBSSH2_VERSION_NUM[\t ]+0x[0-9][0-9]([0-9][0-9]).*$" "\\1" LIBSSH2_VERSION_MINOR "${libssh2_version_str}") - string(REGEX REPLACE "^.*LIBSSH2_VERSION_NUM[\t ]+0x[0-9][0-9][0-9][0-9]([0-9][0-9]).*$" "\\1" LIBSSH2_VERSION_PATCH "${libssh2_version_str}") - - string(REGEX REPLACE "^0(.+)" "\\1" LIBSSH2_VERSION_MAJOR "${LIBSSH2_VERSION_MAJOR}") - string(REGEX REPLACE "^0(.+)" "\\1" LIBSSH2_VERSION_MINOR "${LIBSSH2_VERSION_MINOR}") - string(REGEX REPLACE "^0(.+)" "\\1" LIBSSH2_VERSION_PATCH "${LIBSSH2_VERSION_PATCH}") - - set(LIBSSH2_VERSION "${LIBSSH2_VERSION_MAJOR}.${LIBSSH2_VERSION_MINOR}.${LIBSSH2_VERSION_PATCH}") + file(STRINGS "${LIBSSH2_INCLUDE_DIR}/libssh2.h" libssh2_version_str REGEX "^#define[\t ]+LIBSSH2_VERSION[\t ]+\"(.*)\"") + string(REGEX REPLACE "^.*\"([^\"]+)\"" "\\1" LIBSSH2_VERSION "${libssh2_version_str}") endif() include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(LibSSH2 DEFAULT_MSG LIBSSH2_INCLUDE_DIR LIBSSH2_LIBRARY ) +find_package_handle_standard_args(LibSSH2 + REQUIRED_VARS LIBSSH2_LIBRARY LIBSSH2_INCLUDE_DIR + VERSION_VAR LIBSSH2_VERSION) -mark_as_advanced(LIBSSH2_INCLUDE_DIR LIBSSH2_LIBRARY LIBSSH2_VERSION_MAJOR LIBSSH2_VERSION_MINOR LIBSSH2_VERSION_PATCH LIBSSH2_VERSION) +mark_as_advanced(LIBSSH2_INCLUDE_DIR LIBSSH2_LIBRARY) diff --git a/CMake/FindMbedTLS.cmake b/CMake/FindMbedTLS.cmake index a91639589218f0..2ebe721a64f59e 100644 --- a/CMake/FindMbedTLS.cmake +++ b/CMake/FindMbedTLS.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### find_path(MBEDTLS_INCLUDE_DIRS mbedtls/ssl.h) find_library(MBEDTLS_LIBRARY mbedtls) diff --git a/CMake/FindNGHTTP2.cmake b/CMake/FindNGHTTP2.cmake index 348b9612dfa701..e1eba05327a40d 100644 --- a/CMake/FindNGHTTP2.cmake +++ b/CMake/FindNGHTTP2.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### include(FindPackageHandleStandardArgs) find_path(NGHTTP2_INCLUDE_DIR "nghttp2/nghttp2.h") @@ -10,9 +31,9 @@ find_package_handle_standard_args(NGHTTP2 REQUIRED_VARS NGHTTP2_LIBRARY NGHTTP2_INCLUDE_DIR - FAIL_MESSAGE - "Could NOT find NGHTTP2" ) set(NGHTTP2_INCLUDE_DIRS ${NGHTTP2_INCLUDE_DIR}) set(NGHTTP2_LIBRARIES ${NGHTTP2_LIBRARY}) + +mark_as_advanced(NGHTTP2_INCLUDE_DIRS NGHTTP2_LIBRARIES) diff --git a/CMake/FindNGHTTP3.cmake b/CMake/FindNGHTTP3.cmake new file mode 100644 index 00000000000000..73ce9e188390d6 --- /dev/null +++ b/CMake/FindNGHTTP3.cmake @@ -0,0 +1,76 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### + +#[=======================================================================[.rst: +FindNGHTTP3 +---------- + +Find the nghttp3 library + +Result Variables +^^^^^^^^^^^^^^^^ + +``NGHTTP3_FOUND`` + System has nghttp3 +``NGHTTP3_INCLUDE_DIRS`` + The nghttp3 include directories. +``NGHTTP3_LIBRARIES`` + The libraries needed to use nghttp3 +``NGHTTP3_VERSION`` + version of nghttp3. 
+#]=======================================================================] + +if(UNIX) + find_package(PkgConfig QUIET) + pkg_search_module(PC_NGHTTP3 libnghttp3) +endif() + +find_path(NGHTTP3_INCLUDE_DIR nghttp3/nghttp3.h + HINTS + ${PC_NGHTTP3_INCLUDEDIR} + ${PC_NGHTTP3_INCLUDE_DIRS} +) + +find_library(NGHTTP3_LIBRARY NAMES nghttp3 + HINTS + ${PC_NGHTTP3_LIBDIR} + ${PC_NGHTTP3_LIBRARY_DIRS} +) + +if(PC_NGHTTP3_VERSION) + set(NGHTTP3_VERSION ${PC_NGHTTP3_VERSION}) +endif() + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(NGHTTP3 + REQUIRED_VARS + NGHTTP3_LIBRARY + NGHTTP3_INCLUDE_DIR + VERSION_VAR NGHTTP3_VERSION +) + +if(NGHTTP3_FOUND) + set(NGHTTP3_LIBRARIES ${NGHTTP3_LIBRARY}) + set(NGHTTP3_INCLUDE_DIRS ${NGHTTP3_INCLUDE_DIR}) +endif() + +mark_as_advanced(NGHTTP3_INCLUDE_DIRS NGHTTP3_LIBRARIES) diff --git a/CMake/FindNGTCP2.cmake b/CMake/FindNGTCP2.cmake new file mode 100644 index 00000000000000..a1ed8cd4be5f7a --- /dev/null +++ b/CMake/FindNGTCP2.cmake @@ -0,0 +1,113 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### + +#[=======================================================================[.rst: +FindNGTCP2 +---------- + +Find the ngtcp2 library + +This module accepts optional COMPONENTS to control the crypto library (these are +mutually exclusive):: + + OpenSSL: Use libngtcp2_crypto_openssl + GnuTLS: Use libngtcp2_crypto_gnutls + +Result Variables +^^^^^^^^^^^^^^^^ + +``NGTCP2_FOUND`` + System has ngtcp2 +``NGTCP2_INCLUDE_DIRS`` + The ngtcp2 include directories. +``NGTCP2_LIBRARIES`` + The libraries needed to use ngtcp2 +``NGTCP2_VERSION`` + version of ngtcp2. 
+#]=======================================================================] + +if(UNIX) + find_package(PkgConfig QUIET) + pkg_search_module(PC_NGTCP2 libngtcp2) +endif() + +find_path(NGTCP2_INCLUDE_DIR ngtcp2/ngtcp2.h + HINTS + ${PC_NGTCP2_INCLUDEDIR} + ${PC_NGTCP2_INCLUDE_DIRS} +) + +find_library(NGTCP2_LIBRARY NAMES ngtcp2 + HINTS + ${PC_NGTCP2_LIBDIR} + ${PC_NGTCP2_LIBRARY_DIRS} +) + +if(PC_NGTCP2_VERSION) + set(NGTCP2_VERSION ${PC_NGTCP2_VERSION}) +endif() + +if(NGTCP2_FIND_COMPONENTS) + set(NGTCP2_CRYPTO_BACKEND "") + foreach(component IN LISTS NGTCP2_FIND_COMPONENTS) + if(component MATCHES "^(OpenSSL|GnuTLS)") + if(NGTCP2_CRYPTO_BACKEND) + message(FATAL_ERROR "NGTCP2: Only one crypto library can be selected") + endif() + set(NGTCP2_CRYPTO_BACKEND ${component}) + endif() + endforeach() + + if(NGTCP2_CRYPTO_BACKEND) + string(TOLOWER "ngtcp2_crypto_${NGTCP2_CRYPTO_BACKEND}" _crypto_library) + if(UNIX) + pkg_search_module(PC_${_crypto_library} lib${_crypto_library}) + endif() + find_library(${_crypto_library}_LIBRARY + NAMES + ${_crypto_library} + HINTS + ${PC_${_crypto_library}_LIBDIR} + ${PC_${_crypto_library}_LIBRARY_DIRS} + ) + if(${_crypto_library}_LIBRARY) + set(NGTCP2_${NGTCP2_CRYPTO_BACKEND}_FOUND TRUE) + set(NGTCP2_CRYPTO_LIBRARY ${${_crypto_library}_LIBRARY}) + endif() + endif() +endif() + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(NGTCP2 + REQUIRED_VARS + NGTCP2_LIBRARY + NGTCP2_INCLUDE_DIR + VERSION_VAR NGTCP2_VERSION + HANDLE_COMPONENTS +) + +if(NGTCP2_FOUND) + set(NGTCP2_LIBRARIES ${NGTCP2_LIBRARY} ${NGTCP2_CRYPTO_LIBRARY}) + set(NGTCP2_INCLUDE_DIRS ${NGTCP2_INCLUDE_DIR}) +endif() + +mark_as_advanced(NGTCP2_INCLUDE_DIRS NGTCP2_LIBRARIES) diff --git a/CMake/FindNSS.cmake b/CMake/FindNSS.cmake new file mode 100644 index 00000000000000..5fdb2b776e04ef --- /dev/null +++ b/CMake/FindNSS.cmake @@ -0,0 +1,38 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### +if(UNIX) + find_package(PkgConfig QUIET) + pkg_search_module(PC_NSS nss) +endif() +if(NOT PC_NSS_FOUND) + return() +endif() + +set(NSS_LIBRARIES ${PC_NSS_LINK_LIBRARIES}) +set(NSS_INCLUDE_DIRS ${PC_NSS_INCLUDE_DIRS}) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(NSS + REQUIRED_VARS NSS_LIBRARIES NSS_INCLUDE_DIRS + VERSION_VAR PC_NSS_VERSION) + +mark_as_advanced(NSS_INCLUDE_DIRS NSS_LIBRARIES) diff --git a/CMake/FindQUICHE.cmake b/CMake/FindQUICHE.cmake new file mode 100644 index 00000000000000..01d17582bef208 --- /dev/null +++ b/CMake/FindQUICHE.cmake @@ -0,0 +1,68 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### + +#[=======================================================================[.rst: +FindQUICHE +---------- + +Find the quiche library + +Result Variables +^^^^^^^^^^^^^^^^ + +``QUICHE_FOUND`` + System has quiche +``QUICHE_INCLUDE_DIRS`` + The quiche include directories. +``QUICHE_LIBRARIES`` + The libraries needed to use quiche +#]=======================================================================] +if(UNIX) + find_package(PkgConfig QUIET) + pkg_search_module(PC_QUICHE quiche) +endif() + +find_path(QUICHE_INCLUDE_DIR quiche.h + HINTS + ${PC_QUICHE_INCLUDEDIR} + ${PC_QUICHE_INCLUDE_DIRS} +) + +find_library(QUICHE_LIBRARY NAMES quiche + HINTS + ${PC_QUICHE_LIBDIR} + ${PC_QUICHE_LIBRARY_DIRS} +) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(QUICHE + REQUIRED_VARS + QUICHE_LIBRARY + QUICHE_INCLUDE_DIR +) + +if(QUICHE_FOUND) + set(QUICHE_LIBRARIES ${QUICHE_LIBRARY}) + set(QUICHE_INCLUDE_DIRS ${QUICHE_INCLUDE_DIR}) +endif() + +mark_as_advanced(QUICHE_INCLUDE_DIRS QUICHE_LIBRARIES) diff --git a/CMake/FindWolfSSL.cmake b/CMake/FindWolfSSL.cmake new file mode 100644 index 00000000000000..54df1a86c03e4c --- /dev/null +++ b/CMake/FindWolfSSL.cmake @@ -0,0 +1,34 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### +find_path(WolfSSL_INCLUDE_DIR NAMES wolfssl/ssl.h) +find_library(WolfSSL_LIBRARY NAMES wolfssl) +mark_as_advanced(WolfSSL_INCLUDE_DIR WolfSSL_LIBRARY) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(WolfSSL + REQUIRED_VARS WolfSSL_INCLUDE_DIR WolfSSL_LIBRARY + ) + +if(WolfSSL_FOUND) + set(WolfSSL_INCLUDE_DIRS ${WolfSSL_INCLUDE_DIR}) + set(WolfSSL_LIBRARIES ${WolfSSL_LIBRARY}) +endif() diff --git a/CMake/FindZstd.cmake b/CMake/FindZstd.cmake new file mode 100644 index 00000000000000..44c741ae82cbf4 --- /dev/null +++ b/CMake/FindZstd.cmake @@ -0,0 +1,69 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### + +#[=======================================================================[.rst: +FindZstd +---------- + +Find the zstd library + +Result Variables +^^^^^^^^^^^^^^^^ + +``Zstd_FOUND`` + System has zstd +``Zstd_INCLUDE_DIRS`` + The zstd include directories. +``Zstd_LIBRARIES`` + The libraries needed to use zstd +#]=======================================================================] + +if(UNIX) + find_package(PkgConfig QUIET) + pkg_search_module(PC_Zstd libzstd) +endif() + +find_path(Zstd_INCLUDE_DIR zstd.h + HINTS + ${PC_Zstd_INCLUDEDIR} + ${PC_Zstd_INCLUDE_DIRS} +) + +find_library(Zstd_LIBRARY NAMES zstd + HINTS + ${PC_Zstd_LIBDIR} + ${PC_Zstd_LIBRARY_DIRS} +) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Zstd + REQUIRED_VARS + Zstd_LIBRARY + Zstd_INCLUDE_DIR +) + +if(Zstd_FOUND) + set(Zstd_LIBRARIES ${Zstd_LIBRARY}) + set(Zstd_INCLUDE_DIRS ${Zstd_INCLUDE_DIR}) +endif() + +mark_as_advanced(Zstd_INCLUDE_DIRS Zstd_LIBRARIES) diff --git a/CMake/Macros.cmake b/CMake/Macros.cmake index 7f71345156c216..65a41e484a939d 100644 --- a/CMake/Macros.cmake +++ b/CMake/Macros.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### #File defines convenience macros for available feature testing # This macro checks if the symbol exists in the library and if it diff --git a/CMake/OtherTests.cmake b/CMake/OtherTests.cmake index c1c9aa32ab5c52..7cec6da6de2feb 100644 --- a/CMake/OtherTests.cmake +++ b/CMake/OtherTests.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### include(CheckCSourceCompiles) # The begin of the sources (macros and includes) set(_source_epilogue "#undef inline") @@ -32,7 +53,7 @@ int main(void) { return 0; }" curl_cv_recv) if(curl_cv_recv) - if(NOT DEFINED curl_cv_func_recv_args OR "${curl_cv_func_recv_args}" STREQUAL "unknown") + if(NOT DEFINED curl_cv_func_recv_args OR curl_cv_func_recv_args STREQUAL "unknown") foreach(recv_retv "int" "ssize_t" ) foreach(recv_arg1 "SOCKET" "int" ) foreach(recv_arg2 "char *" "void *" ) @@ -42,6 +63,9 @@ if(curl_cv_recv) unset(curl_cv_func_recv_test CACHE) check_c_source_compiles(" ${_source_epilogue} + #ifdef WINSOCK_API_LINKAGE + WINSOCK_API_LINKAGE + #endif extern ${recv_retv} ${signature_call_conv} recv(${recv_arg1}, ${recv_arg2}, ${recv_arg3}, ${recv_arg4}); int main(void) { @@ -81,7 +105,7 @@ if(curl_cv_recv) string(REGEX REPLACE "^[^,]*,[^,]*,[^,]*,[^,]*,([^,]*)$" "\\1" RECV_TYPE_RETV "${curl_cv_func_recv_args}") endif() - if("${curl_cv_func_recv_args}" STREQUAL "unknown") + if(curl_cv_func_recv_args STREQUAL "unknown") message(FATAL_ERROR "Cannot find proper types to use for recv args") endif() else() @@ -106,6 +130,9 @@ if(curl_cv_send) unset(curl_cv_func_send_test CACHE) check_c_source_compiles(" ${_source_epilogue} + #ifdef WINSOCK_API_LINKAGE + WINSOCK_API_LINKAGE + #endif extern ${send_retv} ${signature_call_conv} send(${send_arg1}, ${send_arg2}, ${send_arg3}, ${send_arg4}); int main(void) { diff --git a/CMake/Platforms/WindowsCache.cmake b/CMake/Platforms/WindowsCache.cmake index cafaec216c252f..9ae9b56f0b2fd9 100644 --- a/CMake/Platforms/WindowsCache.cmake +++ b/CMake/Platforms/WindowsCache.cmake @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. 
+# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### if(NOT UNIX) if(WIN32) set(HAVE_LIBDL 0) @@ -7,7 +28,6 @@ if(NOT UNIX) set(HAVE_LIBNSL 0) set(HAVE_GETHOSTNAME 1) set(HAVE_LIBZ 0) - set(HAVE_LIBCRYPTO 0) set(HAVE_DLOPEN 0) diff --git a/CMake/Utilities.cmake b/CMake/Utilities.cmake index 5cb1d449754bb2..59b17d07440824 100644 --- a/CMake/Utilities.cmake +++ b/CMake/Utilities.cmake @@ -1,13 +1,33 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### # File containing various utilities # Returns a list of arguments that evaluate to true function(count_true output_count_var) - set(lst) + set(lst_len 0) foreach(option_var IN LISTS ARGN) if(${option_var}) - list(APPEND lst ${option_var}) + math(EXPR lst_len "${lst_len} + 1") endif() endforeach() - list(LENGTH lst lst_len) set(${output_count_var} ${lst_len} PARENT_SCOPE) endfunction() diff --git a/CMake/cmake_uninstall.cmake.in b/CMake/cmake_uninstall.cmake.in index db8e5367dbcca2..4a0de5e6bcd9b0 100644 --- a/CMake/cmake_uninstall.cmake.in +++ b/CMake/cmake_uninstall.cmake.in @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt") message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt") endif() diff --git a/CMake/curl-config.cmake.in b/CMake/curl-config.cmake.in index 1294e173a009af..ae8cc30f1a646c 100644 --- a/CMake/curl-config.cmake.in +++ b/CMake/curl-config.cmake.in @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### @PACKAGE_INIT@ include(CMakeFindDependencyMacro) diff --git a/CMakeLists.txt b/CMakeLists.txt index 594501ec63d379..ec1cfa78264afd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2019, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms @@ -26,7 +26,6 @@ # The output .so file lacks the soname number which we currently have within the lib/Makefile.am file # Add full (4 or 5 libs) SSL support # Add INSTALL target (EXTRA_DIST variables in Makefile.am may be moved to Makefile.inc so that CMake/CPack is aware of what's to include). -# Add CTests(?) # Check on all possible platforms # Test with as many configurations possible (With or without any option) # Create scripts that help keeping the CMake build system up to date (to reduce maintenance). According to Tetetest: @@ -38,7 +37,8 @@ # To check: # (From Daniel Stenberg) The cmake build selected to run gcc with -fPIC on my box while the plain configure script did not. # (From Daniel Stenberg) The gcc command line use neither -g nor any -O options. As a developer, I also treasure our configure scripts's --enable-debug option that sets a long range of "picky" compiler options. -cmake_minimum_required(VERSION 3.0 FATAL_ERROR) +cmake_minimum_required(VERSION 3.2...3.16 FATAL_ERROR) + set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMake;${CMAKE_MODULE_PATH}") include(Utilities) include(Macros) @@ -47,9 +47,7 @@ include(CheckCCompilerFlag) project(CURL C) -message(WARNING "the curl cmake build system is poorly maintained. 
Be aware") - -file(READ ${CURL_SOURCE_DIR}/include/curl/curlver.h CURL_VERSION_H_CONTENTS) +file(STRINGS ${CURL_SOURCE_DIR}/include/curl/curlver.h CURL_VERSION_H_CONTENTS REGEX "#define LIBCURL_VERSION( |_NUM )") string(REGEX MATCH "#define LIBCURL_VERSION \"[^\"]*" CURL_VERSION ${CURL_VERSION_H_CONTENTS}) string(REGEX REPLACE "[^\"]+\"" "" CURL_VERSION ${CURL_VERSION}) @@ -79,7 +77,28 @@ option(ENABLE_ARES "Set to ON to enable c-ares support" OFF) if(WIN32) option(CURL_STATIC_CRT "Set to ON to build libcurl with static CRT on Windows (/MT)." OFF) option(ENABLE_INET_PTON "Set to OFF to prevent usage of inet_pton when building against modern SDKs while still requiring compatibility with older Windows versions, such as Windows XP, Windows Server 2003 etc." ON) + option(ENABLE_UNICODE "Set to ON to use the Unicode version of the Windows API functions" OFF) + set(CURL_TARGET_WINDOWS_VERSION "" CACHE STRING "Minimum target Windows version as hex string") + if(CURL_TARGET_WINDOWS_VERSION) + add_definitions(-D_WIN32_WINNT=${CURL_TARGET_WINDOWS_VERSION}) + set(CMAKE_REQUIRED_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS} -D_WIN32_WINNT=${CURL_TARGET_WINDOWS_VERSION}") + elseif(ENABLE_INET_PTON) + # _WIN32_WINNT_VISTA (0x0600) + add_definitions(-D_WIN32_WINNT=0x0600) + set(CMAKE_REQUIRED_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS} -D_WIN32_WINNT=0x0600") + else() + # _WIN32_WINNT_WINXP (0x0501) + add_definitions(-D_WIN32_WINNT=0x0501) + set(CMAKE_REQUIRED_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS} -D_WIN32_WINNT=0x0501") + endif() + if(ENABLE_UNICODE) + add_definitions(-DUNICODE -D_UNICODE) + if(MINGW) + add_compile_options(-municode) + endif() + endif() endif() +option(CURL_LTO "Turn on compiler Link Time Optimizations" OFF) cmake_dependent_option(ENABLE_THREADED_RESOLVER "Set to ON to enable threaded DNS lookup" ON "NOT ENABLE_ARES" @@ -90,14 +109,24 @@ option(ENABLE_CURLDEBUG "Set to ON to build with TrackMemory feature enabled" OF if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_CLANG) if(PICKY_COMPILER) - foreach(_CCOPT -pedantic -Wall -W -Wpointer-arith -Wwrite-strings -Wunused -Wshadow -Winline -Wnested-externs -Wmissing-declarations -Wmissing-prototypes -Wno-long-long -Wfloat-equal -Wno-multichar -Wsign-compare -Wundef -Wno-format-nonliteral -Wendif-labels -Wstrict-prototypes -Wdeclaration-after-statement -Wstrict-aliasing=3 -Wcast-align -Wtype-limits -Wold-style-declaration -Wmissing-parameter-type -Wempty-body -Wclobbered -Wignored-qualifiers -Wconversion -Wno-sign-conversion -Wvla -Wdouble-promotion -Wno-system-headers -Wno-pedantic-ms-format) + foreach(_CCOPT -pedantic -Wall -W -Wpointer-arith -Wwrite-strings -Wunused -Wshadow -Winline -Wnested-externs -Wmissing-declarations -Wmissing-prototypes -Wfloat-equal -Wsign-compare -Wundef -Wendif-labels -Wstrict-prototypes -Wdeclaration-after-statement -Wstrict-aliasing=3 -Wcast-align -Wtype-limits -Wold-style-declaration -Wmissing-parameter-type -Wempty-body -Wclobbered -Wignored-qualifiers -Wconversion -Wvla -Wdouble-promotion) # surprisingly, CHECK_C_COMPILER_FLAG needs a new variable to store each new # test result in. 
- check_c_compiler_flag(${_CCOPT} OPT${_CCOPT}) - if(OPT${_CCOPT}) + string(MAKE_C_IDENTIFIER "OPT${_CCOPT}" _optvarname) + check_c_compiler_flag(${_CCOPT} ${_optvarname}) + if(${_optvarname}) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${_CCOPT}") endif() endforeach() + foreach(_CCOPT long-long multichar format-nonliteral sign-conversion system-headers pedantic-ms-format) + # GCC only warns about unknown -Wno- options if there are also other diagnostic messages, + # so test for the positive form instead + string(MAKE_C_IDENTIFIER "OPT${_CCOPT}" _optvarname) + check_c_compiler_flag("-W${_CCOPT}" ${_optvarname}) + if(${_optvarname}) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-${_CCOPT}") + endif() + endforeach() endif() endif() @@ -123,7 +152,6 @@ if(ENABLE_ARES) set(USE_ARES 1) find_package(CARES REQUIRED) list(APPEND CURL_LIBS ${CARES_LIBRARY}) - set(CURL_LIBS ${CURL_LIBS} ${CARES_LIBRARY}) endif() include(CurlSymbolHiding) @@ -160,20 +188,24 @@ option(CURL_DISABLE_SMTP "to disable SMTP" OFF) mark_as_advanced(CURL_DISABLE_SMTP) option(CURL_DISABLE_GOPHER "to disable Gopher" OFF) mark_as_advanced(CURL_DISABLE_GOPHER) +option(CURL_DISABLE_MQTT "to disable MQTT" OFF) +mark_as_advanced(CURL_DISABLE_MQTT) if(HTTP_ONLY) + set(CURL_DISABLE_DICT ON) + set(CURL_DISABLE_FILE ON) set(CURL_DISABLE_FTP ON) + set(CURL_DISABLE_GOPHER ON) + set(CURL_DISABLE_IMAP ON) set(CURL_DISABLE_LDAP ON) set(CURL_DISABLE_LDAPS ON) - set(CURL_DISABLE_TELNET ON) - set(CURL_DISABLE_DICT ON) - set(CURL_DISABLE_FILE ON) - set(CURL_DISABLE_TFTP ON) - set(CURL_DISABLE_RTSP ON) + set(CURL_DISABLE_MQTT ON) set(CURL_DISABLE_POP3 ON) - set(CURL_DISABLE_IMAP ON) + set(CURL_DISABLE_RTSP ON) + set(CURL_DISABLE_SMB ON) set(CURL_DISABLE_SMTP ON) - set(CURL_DISABLE_GOPHER ON) + set(CURL_DISABLE_TELNET ON) + set(CURL_DISABLE_TFTP ON) endif() option(CURL_DISABLE_COOKIES "to disable cookies support" OFF) @@ -199,26 +231,22 @@ if(ENABLE_IPV6 AND NOT WIN32) endif() endif() -curl_nroff_check() +if(USE_MANUAL) + #nroff is currently only used when USE_MANUAL is set, so we can prevent the warning of no *NROFF if USE_MANUAL is OFF (or not defined), by not even looking for NROFF.. 
+ curl_nroff_check() +endif() find_package(Perl) cmake_dependent_option(ENABLE_MANUAL "to provide the built-in manual" ON "NROFF_USEFUL;PERL_FOUND" OFF) -if(NOT PERL_FOUND) - message(STATUS "Perl not found, testing disabled.") - set(BUILD_TESTING OFF) -endif() if(ENABLE_MANUAL) set(USE_MANUAL ON) endif() -# We need ansi c-flags, especially on HP -set(CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS}") -set(CMAKE_REQUIRED_FLAGS ${CMAKE_ANSI_CFLAGS}) - if(CURL_STATIC_CRT) + set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>") set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /MT") set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} /MTd") endif() @@ -234,6 +262,7 @@ if(${CMAKE_SYSTEM_NAME} MATCHES AIX) endif() # Include all the necessary files for macros +include(CMakePushCheckState) include(CheckFunctionExists) include(CheckIncludeFile) include(CheckIncludeFiles) @@ -260,7 +289,7 @@ if(ENABLE_THREADED_RESOLVER) endif() # Check for all needed libraries -check_library_exists_concat("dl" dlopen HAVE_LIBDL) +check_library_exists_concat("${CMAKE_DL_LIBS}" dlopen HAVE_LIBDL) check_library_exists_concat("socket" connect HAVE_LIBSOCKET) check_library_exists("c" gethostbyname "" NOT_NEED_LIBNSL) @@ -284,38 +313,47 @@ if(WIN32) endif() # check SSL libraries -# TODO support GNUTLS, NSS, POLARSSL, CYASSL +# TODO support GnuTLS +if(CMAKE_USE_WINSSL) + message(FATAL_ERROR "The cmake option CMAKE_USE_WINSSL was renamed to CMAKE_USE_SCHANNEL.") +endif() if(APPLE) - option(CMAKE_USE_DARWINSSL "enable Apple OS native SSL/TLS" OFF) + option(CMAKE_USE_SECTRANSP "enable Apple OS native SSL/TLS" OFF) endif() if(WIN32) - option(CMAKE_USE_WINSSL "enable Windows native SSL/TLS" OFF) + option(CMAKE_USE_SCHANNEL "enable Windows native SSL/TLS" OFF) cmake_dependent_option(CURL_WINDOWS_SSPI "Use windows libraries to allow NTLM authentication without openssl" ON - CMAKE_USE_WINSSL OFF) + CMAKE_USE_SCHANNEL OFF) endif() option(CMAKE_USE_MBEDTLS "Enable mbedTLS for SSL/TLS" OFF) +option(CMAKE_USE_BEARSSL "Enable BearSSL for SSL/TLS" OFF) +option(CMAKE_USE_NSS "Enable NSS for SSL/TLS" OFF) +option(CMAKE_USE_WOLFSSL "enable wolfSSL for SSL/TLS" OFF) set(openssl_default ON) -if(WIN32 OR CMAKE_USE_DARWINSSL OR CMAKE_USE_WINSSL OR CMAKE_USE_MBEDTLS) +if(WIN32 OR CMAKE_USE_SECTRANSP OR CMAKE_USE_SCHANNEL OR CMAKE_USE_MBEDTLS OR CMAKE_USE_NSS OR CMAKE_USE_WOLFSSL) set(openssl_default OFF) endif() option(CMAKE_USE_OPENSSL "Use OpenSSL code. 
Experimental" ${openssl_default}) count_true(enabled_ssl_options_count - CMAKE_USE_WINSSL - CMAKE_USE_DARWINSSL + CMAKE_USE_SCHANNEL + CMAKE_USE_SECTRANSP CMAKE_USE_OPENSSL CMAKE_USE_MBEDTLS + CMAKE_USE_BEARSSL + CMAKE_USE_NSS + CMAKE_USE_WOLFSSL ) if(enabled_ssl_options_count GREATER "1") set(CURL_WITH_MULTI_SSL ON) endif() -if(CMAKE_USE_WINSSL) +if(CMAKE_USE_SCHANNEL) set(SSL_ENABLED ON) set(USE_SCHANNEL ON) # Windows native SSL/TLS support - set(USE_WINDOWS_SSPI ON) # CMAKE_USE_WINSSL implies CURL_WINDOWS_SSPI + set(USE_WINDOWS_SSPI ON) # CMAKE_USE_SCHANNEL implies CURL_WINDOWS_SSPI list(APPEND CURL_LIBS "crypt32") endif() if(CURL_WINDOWS_SSPI) @@ -324,6 +362,10 @@ if(CURL_WINDOWS_SSPI) endif() if(CMAKE_USE_DARWINSSL) + message(FATAL_ERROR "The cmake option CMAKE_USE_DARWINSSL was renamed to CMAKE_USE_SECTRANSP.") +endif() + +if(CMAKE_USE_SECTRANSP) find_library(COREFOUNDATION_FRAMEWORK "CoreFoundation") if(NOT COREFOUNDATION_FRAMEWORK) message(FATAL_ERROR "CoreFoundation framework not found") @@ -335,7 +377,7 @@ if(CMAKE_USE_DARWINSSL) endif() set(SSL_ENABLED ON) - set(USE_DARWINSSL ON) + set(USE_SECTRANSP ON) list(APPEND CURL_LIBS "${COREFOUNDATION_FRAMEWORK}" "${SECURITY_FRAMEWORK}") endif() @@ -343,8 +385,6 @@ if(CMAKE_USE_OPENSSL) find_package(OpenSSL REQUIRED) set(SSL_ENABLED ON) set(USE_OPENSSL ON) - set(HAVE_LIBCRYPTO ON) - set(HAVE_LIBSSL ON) # Depend on OpenSSL via imported targets if supported by the running # version of CMake. This allows our dependents to get our dependencies @@ -377,6 +417,35 @@ if(CMAKE_USE_MBEDTLS) include_directories(${MBEDTLS_INCLUDE_DIRS}) endif() +if(CMAKE_USE_BEARSSL) + find_package(BearSSL REQUIRED) + set(SSL_ENABLED ON) + set(USE_BEARSSL ON) + list(APPEND CURL_LIBS ${BEARSSL_LIBRARY}) + include_directories(${BEARSSL_INCLUDE_DIRS}) +endif() + +if(CMAKE_USE_WOLFSSL) + find_package(WolfSSL REQUIRED) + set(SSL_ENABLED ON) + set(USE_WOLFSSL ON) + list(APPEND CURL_LIBS ${WolfSSL_LIBRARIES}) + include_directories(${WolfSSL_INCLUDE_DIRS}) +endif() + +if(CMAKE_USE_NSS) + find_package(NSS REQUIRED) + include_directories(${NSS_INCLUDE_DIRS}) + list(APPEND CURL_LIBS ${NSS_LIBRARIES}) + set(SSL_ENABLED ON) + set(USE_NSS ON) + cmake_push_check_state() + set(CMAKE_REQUIRED_INCLUDES ${NSS_INCLUDE_DIRS}) + set(CMAKE_REQUIRED_LIBRARIES ${NSS_LIBRARIES}) + check_symbol_exists(PK11_CreateManagedGenericObject "pk11pub.h" HAVE_PK11_CREATEMANAGEDGENERICOBJECT) + cmake_pop_check_state() +endif() + option(USE_NGHTTP2 "Use Nghttp2 library" OFF) if(USE_NGHTTP2) find_package(NGHTTP2 REQUIRED) @@ -384,6 +453,60 @@ if(USE_NGHTTP2) list(APPEND CURL_LIBS ${NGHTTP2_LIBRARIES}) endif() +function(CheckQuicSupportInOpenSSL) + # Be sure that the OpenSSL library actually supports QUIC. + cmake_push_check_state() + set(CMAKE_REQUIRED_INCLUDES "${OPENSSL_INCLUDE_DIR}") + set(CMAKE_REQUIRED_LIBRARIES "${OPENSSL_LIBRARIES}") + check_symbol_exists(SSL_CTX_set_quic_method "openssl/ssl.h" HAVE_SSL_CTX_SET_QUIC_METHOD) + if(NOT HAVE_SSL_CTX_SET_QUIC_METHOD) + message(FATAL_ERROR "QUIC support is missing in OpenSSL/boringssl. Try setting -DOPENSSL_ROOT_DIR") + endif() + cmake_pop_check_state() +endfunction() + +option(USE_NGTCP2 "Use ngtcp2 and nghttp3 libraries for HTTP/3 support" OFF) +if(USE_NGTCP2) + if(USE_OPENSSL) + find_package(NGTCP2 REQUIRED OpenSSL) + CheckQuicSupportInOpenSSL() + elseif(USE_GNUTLS) + # TODO add GnuTLS support as vtls library. 
+ find_package(NGTCP2 REQUIRED GnuTLS) + else() + message(FATAL_ERROR "ngtcp2 requires OpenSSL or GnuTLS") + endif() + set(USE_NGTCP2 ON) + include_directories(${NGTCP2_INCLUDE_DIRS}) + list(APPEND CURL_LIBS ${NGTCP2_LIBRARIES}) + + find_package(NGHTTP3 REQUIRED) + set(USE_NGHTTP3 ON) + include_directories(${NGHTTP3_INCLUDE_DIRS}) + list(APPEND CURL_LIBS ${NGHTTP3_LIBRARIES}) +endif() + +option(USE_QUICHE "Use quiche library for HTTP/3 support" OFF) +if(USE_QUICHE) + if(USE_NGTCP2) + message(FATAL_ERROR "Only one HTTP/3 backend can be selected!") + endif() + find_package(QUICHE REQUIRED) + CheckQuicSupportInOpenSSL() + set(USE_QUICHE ON) + include_directories(${QUICHE_INCLUDE_DIRS}) + list(APPEND CURL_LIBS ${QUICHE_LIBRARIES}) + cmake_push_check_state() + set(CMAKE_REQUIRED_INCLUDES "${QUICHE_INCLUDE_DIRS}") + set(CMAKE_REQUIRED_LIBRARIES "${QUICHE_LIBRARIES}") + check_symbol_exists(quiche_conn_set_qlog_fd "quiche.h" HAVE_QUICHE_CONN_SET_QLOG_FD) + cmake_pop_check_state() +endif() + +if(WIN32) + set(USE_WIN32_CRYPTO ON) +endif() + if(NOT CURL_DISABLE_LDAP) if(WIN32) option(USE_WIN32_LDAP "Use Windows LDAP implementation" ON) @@ -526,7 +649,7 @@ endif() option(CURL_BROTLI "Set to ON to enable building curl with brotli support." OFF) set(HAVE_BROTLI OFF) if(CURL_BROTLI) - find_package(BROTLI QUIET) + find_package(Brotli QUIET) if(BROTLI_FOUND) set(HAVE_BROTLI ON) list(APPEND CURL_LIBS ${BROTLI_LIBRARIES}) @@ -535,6 +658,22 @@ if(CURL_BROTLI) endif() endif() +option(CURL_ZSTD "Set to ON to enable building curl with zstd support." OFF) +set(HAVE_ZSTD OFF) +if(CURL_ZSTD) + find_package(Zstd REQUIRED) + cmake_push_check_state() + set(CMAKE_REQUIRED_INCLUDES ${Zstd_INCLUDE_DIRS}) + set(CMAKE_REQUIRED_LIBRARIES ${Zstd_LIBRARIES}) + check_symbol_exists(ZSTD_createDStream "zstd.h" HAVE_ZSTD_CREATEDSTREAM) + cmake_pop_check_state() + if(Zstd_FOUND AND HAVE_ZSTD_CREATEDSTREAM) + set(HAVE_ZSTD ON) + list(APPEND CURL_LIBS ${Zstd_LIBRARIES}) + include_directories(${Zstd_INCLUDE_DIRS}) + endif() +endif() + #libSSH2 option(CMAKE_USE_LIBSSH2 "Use libSSH2" ON) mark_as_advanced(CMAKE_USE_LIBSSH2) @@ -569,6 +708,20 @@ if(CMAKE_USE_LIBSSH2) endif() endif() +# libssh +option(CMAKE_USE_LIBSSH "Use libSSH" OFF) +mark_as_advanced(CMAKE_USE_LIBSSH) +if(NOT HAVE_LIBSSH2 AND CMAKE_USE_LIBSSH) + find_package(libssh CONFIG) + if(libssh_FOUND) + message(STATUS "Found libssh ${libssh_VERSION}") + # Use imported target for include and library paths. + list(APPEND CURL_LIBS ssh) + set(USE_LIBSSH ON) + set(HAVE_LIBSSH_LIBSSH_H 1) + endif() +endif() + option(CMAKE_USE_GSSAPI "Use GSSAPI implementation (right now only Heimdal is supported with CMake build)" OFF) mark_as_advanced(CMAKE_USE_GSSAPI) @@ -637,6 +790,9 @@ else() unset(USE_UNIX_SOCKETS CACHE) endif() +option(ENABLE_ALT_SVC "Enable alt-svc support" OFF) +set(USE_ALTSVC ${ENABLE_ALT_SVC}) + # # CA handling # @@ -664,7 +820,9 @@ elseif("${CURL_CA_PATH}" STREQUAL "none") unset(CURL_CA_PATH CACHE) elseif("${CURL_CA_PATH}" STREQUAL "auto") unset(CURL_CA_PATH CACHE) - set(CURL_CA_PATH_AUTODETECT TRUE) + if(NOT USE_NSS) + set(CURL_CA_PATH_AUTODETECT TRUE) + endif() else() set(CURL_CA_PATH_SET TRUE) endif() @@ -703,7 +861,7 @@ elseif(CURL_CA_PATH_AUTODETECT OR CURL_CA_BUNDLE_AUTODETECT) endif() if(CURL_CA_PATH_SET AND NOT USE_OPENSSL AND NOT USE_MBEDTLS) - message(FATAL_ERROR + message(STATUS "CA path only supported by OpenSSL, GnuTLS or mbed TLS. 
" "Set CURL_CA_PATH=none or enable one of those TLS backends.") endif() @@ -741,7 +899,6 @@ check_include_file_concat("arpa/inet.h" HAVE_ARPA_INET_H) check_include_file_concat("arpa/tftp.h" HAVE_ARPA_TFTP_H) check_include_file_concat("assert.h" HAVE_ASSERT_H) check_include_file_concat("crypto.h" HAVE_CRYPTO_H) -check_include_file_concat("des.h" HAVE_DES_H) check_include_file_concat("err.h" HAVE_ERR_H) check_include_file_concat("errno.h" HAVE_ERRNO_H) check_include_file_concat("fcntl.h" HAVE_FCNTL_H) @@ -856,6 +1013,7 @@ check_symbol_exists(strlcat "${CURL_INCLUDES}" HAVE_STRLCAT) check_symbol_exists(getpwuid "${CURL_INCLUDES}" HAVE_GETPWUID) check_symbol_exists(getpwuid_r "${CURL_INCLUDES}" HAVE_GETPWUID_R) check_symbol_exists(geteuid "${CURL_INCLUDES}" HAVE_GETEUID) +check_symbol_exists(usleep "${CURL_INCLUDES}" HAVE_USLEEP) check_symbol_exists(utime "${CURL_INCLUDES}" HAVE_UTIME) check_symbol_exists(gmtime_r "${CURL_INCLUDES}" HAVE_GMTIME_R) check_symbol_exists(localtime_r "${CURL_INCLUDES}" HAVE_LOCALTIME_R) @@ -881,6 +1039,9 @@ check_symbol_exists(freeifaddrs "${CURL_INCLUDES}" HAVE_FREEIFADDRS) check_symbol_exists(pipe "${CURL_INCLUDES}" HAVE_PIPE) check_symbol_exists(ftruncate "${CURL_INCLUDES}" HAVE_FTRUNCATE) check_symbol_exists(getprotobyname "${CURL_INCLUDES}" HAVE_GETPROTOBYNAME) +check_symbol_exists(getpeername "${CURL_INCLUDES}" HAVE_GETPEERNAME) +check_symbol_exists(getsockname "${CURL_INCLUDES}" HAVE_GETSOCKNAME) +check_symbol_exists(if_nametoindex "${CURL_INCLUDES}" HAVE_IF_NAMETOINDEX) check_symbol_exists(getrlimit "${CURL_INCLUDES}" HAVE_GETRLIMIT) check_symbol_exists(setlocale "${CURL_INCLUDES}" HAVE_SETLOCALE) check_symbol_exists(setmode "${CURL_INCLUDES}" HAVE_SETMODE) @@ -889,20 +1050,7 @@ check_symbol_exists(fcntl "${CURL_INCLUDES}" HAVE_FCNTL) check_symbol_exists(ioctl "${CURL_INCLUDES}" HAVE_IOCTL) check_symbol_exists(setsockopt "${CURL_INCLUDES}" HAVE_SETSOCKOPT) check_function_exists(mach_absolute_time HAVE_MACH_ABSOLUTE_TIME) - -# symbol exists in win32, but function does not. -if(WIN32) - if(ENABLE_INET_PTON) - check_function_exists(inet_pton HAVE_INET_PTON) - # _WIN32_WINNT_VISTA (0x0600) - add_definitions(-D_WIN32_WINNT=0x0600) - else() - # _WIN32_WINNT_WINXP (0x0501) - add_definitions(-D_WIN32_WINNT=0x0501) - endif() -else() - check_function_exists(inet_pton HAVE_INET_PTON) -endif() +check_symbol_exists(inet_pton "${CURL_INCLUDES}" HAVE_INET_PTON) check_symbol_exists(fsetxattr "${CURL_INCLUDES}" HAVE_FSETXATTR) if(HAVE_FSETXATTR) @@ -1126,6 +1274,23 @@ if(CURL_WERROR) endif() endif() +if(CURL_LTO) + if(CMAKE_VERSION VERSION_LESS 3.9) + message(FATAL_ERROR "Requested LTO but your cmake version ${CMAKE_VERSION} is to old. You need at least 3.9") + endif() + + cmake_policy(SET CMP0069 NEW) + + include(CheckIPOSupported) + check_ipo_supported(RESULT CURL_HAS_LTO OUTPUT CURL_LTO_ERROR LANGUAGES C) + if(CURL_HAS_LTO) + message(STATUS "LTO supported and enabled") + else() + message(FATAL_ERROR "LTO was requested - but compiler doesn't support it\n${CURL_LTO_ERROR}") + endif() +endif() + + # Ugly (but functional) way to include "Makefile.inc" by transforming it (= regenerate it). 
function(transform_makefile_inc INPUT_FILE OUTPUT_FILE) file(READ ${INPUT_FILE} MAKEFILE_INC_TEXT) @@ -1139,7 +1304,7 @@ function(transform_makefile_inc INPUT_FILE OUTPUT_FILE) string(REGEX REPLACE "\\$\\(([a-zA-Z_][a-zA-Z0-9_]*)\\)" "\${\\1}" MAKEFILE_INC_TEXT ${MAKEFILE_INC_TEXT}) # Replace $() with ${} string(REGEX REPLACE "@([a-zA-Z_][a-zA-Z0-9_]*)@" "\${\\1}" MAKEFILE_INC_TEXT ${MAKEFILE_INC_TEXT}) # Replace @@ with ${}, even if that may not be read by CMake scripts. file(WRITE ${OUTPUT_FILE} ${MAKEFILE_INC_TEXT}) - + set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "${INPUT_FILE}") endfunction() include(GNUInstallDirs) @@ -1160,19 +1325,29 @@ if(BUILD_CURL_EXE) add_subdirectory(src) endif() -include(CTest) -if(BUILD_TESTING) +option(BUILD_TESTING "Build tests" "${PERL_FOUND}") +if(NOT PERL_FOUND) + message(STATUS "Perl not found, testing disabled.") +elseif(BUILD_TESTING) add_subdirectory(tests) endif() +# NTLM support requires crypto function adaptions from various SSL libs +# TODO alternative SSL libs tests for SSP1, GNUTLS, NSS +if(NOT CURL_DISABLE_CRYPTO_AUTH AND (USE_OPENSSL OR USE_DARWINSSL OR USE_MBEDTLS OR USE_WIN32_CRYPTO)) + set(use_ntlm ON) +else() + set(use_ntlm OFF) +endif() + # Helper to populate a list (_items) with a label when conditions (the remaining # args) are satisfied -function(_add_if label) - # TODO need to disable policy CMP0054 (CMake 3.1) to allow this indirection +macro(_add_if label) + # needs to be a macro to allow this indirection if(${ARGN}) - set(_items ${_items} "${label}" PARENT_SCOPE) + set(_items ${_items} "${label}") endif() -endfunction() +endmacro() # Clear list and try to detect available features set(_items) @@ -1180,13 +1355,16 @@ _add_if("SSL" SSL_ENABLED) _add_if("IPv6" ENABLE_IPV6) _add_if("unix-sockets" USE_UNIX_SOCKETS) _add_if("libz" HAVE_LIBZ) +_add_if("brotli" HAVE_BROTLI) +_add_if("zstd" HAVE_ZSTD) _add_if("AsynchDNS" USE_ARES OR USE_THREADS_POSIX OR USE_THREADS_WIN32) _add_if("IDN" HAVE_LIBIDN2) _add_if("Largefile" (CURL_SIZEOF_CURL_OFF_T GREATER 4) AND ((SIZEOF_OFF_T GREATER 4) OR USE_WIN32_LARGE_FILES)) -# TODO SSP1 (WinSSL) check is missing +# TODO SSP1 (Schannel) check is missing _add_if("SSPI" USE_WINDOWS_SSPI) _add_if("GSS-API" HAVE_GSSAPI) +_add_if("alt-svc" ENABLE_ALT_SVC) # TODO SSP1 missing for SPNEGO _add_if("SPNEGO" NOT CURL_DISABLE_CRYPTO_AUTH AND (HAVE_GSSAPI OR USE_WINDOWS_SSPI)) @@ -1194,15 +1372,16 @@ _add_if("Kerberos" NOT CURL_DISABLE_CRYPTO_AUTH AND (HAVE_GSSAPI OR USE_WINDOWS_SSPI)) # NTLM support requires crypto function adaptions from various SSL libs # TODO alternative SSL libs tests for SSP1, GNUTLS, NSS -if(NOT CURL_DISABLE_CRYPTO_AUTH AND (USE_OPENSSL OR USE_WINDOWS_SSPI OR USE_DARWINSSL OR USE_MBEDTLS)) - _add_if("NTLM" 1) - # TODO missing option (autoconf: --enable-ntlm-wb) - _add_if("NTLM_WB" NOT CURL_DISABLE_HTTP AND NTLM_WB_ENABLED) -endif() +_add_if("NTLM" use_ntlm OR USE_WINDOWS_SSPI) +# TODO missing option (autoconf: --enable-ntlm-wb) +_add_if("NTLM_WB" use_ntlm AND NOT CURL_DISABLE_HTTP AND NTLM_WB_ENABLED) # TODO missing option (--enable-tls-srp), depends on GNUTLS_SRP/OPENSSL_SRP _add_if("TLS-SRP" USE_TLS_SRP) # TODO option --with-nghttp2 tests for nghttp2 lib and nghttp2/nghttp2.h header _add_if("HTTP2" USE_NGHTTP2) +_add_if("HTTP3" USE_NGTCP2 OR USE_QUICHE) +_add_if("MultiSSL" CURL_WITH_MULTI_SSL) +_add_if("HTTPS-proxy" SSL_ENABLED AND (USE_OPENSSL OR USE_GNUTLS OR USE_NSS)) string(REPLACE ";" " " SUPPORT_FEATURES "${_items}") message(STATUS "Enabled features: 
${SUPPORT_FEATURES}") @@ -1227,12 +1406,15 @@ _add_if("POP3" NOT CURL_DISABLE_POP3) _add_if("POP3S" NOT CURL_DISABLE_POP3 AND SSL_ENABLED) _add_if("IMAP" NOT CURL_DISABLE_IMAP) _add_if("IMAPS" NOT CURL_DISABLE_IMAP AND SSL_ENABLED) +_add_if("SMB" NOT CURL_DISABLE_SMB AND use_ntlm) +_add_if("SMBS" NOT CURL_DISABLE_SMB AND SSL_ENABLED AND use_ntlm) _add_if("SMTP" NOT CURL_DISABLE_SMTP) _add_if("SMTPS" NOT CURL_DISABLE_SMTP AND SSL_ENABLED) -_add_if("SCP" USE_LIBSSH2) -_add_if("SFTP" USE_LIBSSH2) +_add_if("SCP" USE_LIBSSH2 OR USE_LIBSSH) +_add_if("SFTP" USE_LIBSSH2 OR USE_LIBSSH) _add_if("RTSP" NOT CURL_DISABLE_RTSP) _add_if("RTMP" USE_LIBRTMP) +_add_if("MQTT" NOT CURL_DISABLE_MQTT) if(_items) list(SORT _items) endif() @@ -1241,10 +1423,13 @@ message(STATUS "Enabled protocols: ${SUPPORT_PROTOCOLS}") # Clear list and collect SSL backends set(_items) -_add_if("WinSSL" SSL_ENABLED AND USE_WINDOWS_SSPI) -_add_if("OpenSSL" SSL_ENABLED AND USE_OPENSSL) -_add_if("DarwinSSL" SSL_ENABLED AND USE_DARWINSSL) -_add_if("mbedTLS" SSL_ENABLED AND USE_MBEDTLS) +_add_if("Schannel" SSL_ENABLED AND USE_WINDOWS_SSPI) +_add_if("OpenSSL" SSL_ENABLED AND USE_OPENSSL) +_add_if("Secure Transport" SSL_ENABLED AND USE_SECTRANSP) +_add_if("mbedTLS" SSL_ENABLED AND USE_MBEDTLS) +_add_if("BearSSL" SSL_ENABLED AND USE_BEARSSL) +_add_if("NSS" SSL_ENABLED AND USE_NSS) +_add_if("wolfSSL" SSL_ENABLED AND USE_WOLFSSL) if(_items) list(SORT _items) endif() @@ -1258,25 +1443,43 @@ set(CONFIGURE_OPTIONS "") # TODO when to set "-DCURL_STATICLIB" for CPPFLAG_CURL_STATICLIB? set(CPPFLAG_CURL_STATICLIB "") set(CURLVERSION "${CURL_VERSION}") -if(BUILD_SHARED_LIBS) - set(ENABLE_SHARED "yes") - set(ENABLE_STATIC "no") -else() - set(ENABLE_SHARED "no") - set(ENABLE_STATIC "yes") -endif() set(exec_prefix "\${prefix}") set(includedir "\${prefix}/include") set(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS}") set(LIBCURL_LIBS "") set(libdir "${CMAKE_INSTALL_PREFIX}/lib") foreach(_lib ${CMAKE_C_IMPLICIT_LINK_LIBRARIES} ${CURL_LIBS}) + if(TARGET "${_lib}") + set(_libname "${_lib}") + get_target_property(_libtype "${_libname}" TYPE) + if(_libtype STREQUAL INTERFACE_LIBRARY) + # Interface libraries can occur when an external project embeds curl and + # defined targets such as ZLIB::ZLIB by themselves. Ignore these as + # reading the LOCATION property will error out. Assume the user won't need + # this information in the .pc file. + continue() + endif() + get_target_property(_lib "${_libname}" LOCATION) + if(NOT _lib) + message(WARNING "Bad lib in library list: ${_libname}") + continue() + endif() + endif() if(_lib MATCHES ".*/.*" OR _lib MATCHES "^-") set(LIBCURL_LIBS "${LIBCURL_LIBS} ${_lib}") else() set(LIBCURL_LIBS "${LIBCURL_LIBS} -l${_lib}") endif() endforeach() +if(BUILD_SHARED_LIBS) + set(ENABLE_SHARED "yes") + set(ENABLE_STATIC "no") + set(LIBCURL_NO_SHARED "") +else() + set(ENABLE_SHARED "no") + set(ENABLE_STATIC "yes") + set(LIBCURL_NO_SHARED "${LIBCURL_LIBS}") +endif() # "a" (Linux) or "lib" (Windows) string(REPLACE "." "" libext "${CMAKE_STATIC_LIBRARY_SUFFIX}") set(prefix "${CMAKE_INSTALL_PREFIX}") diff --git a/COPYING b/COPYING index 3528bd75663a54..9d9e4af8d8bcfc 100644 --- a/COPYING +++ b/COPYING @@ -1,6 +1,6 @@ COPYRIGHT AND PERMISSION NOTICE -Copyright (c) 1996 - 2019, Daniel Stenberg, , and many +Copyright (c) 1996 - 2020, Daniel Stenberg, , and many contributors, see the THANKS file. All rights reserved. 
diff --git a/MacOSX-Framework b/MacOSX-Framework index e6badcde5424bd..73d9ed8b4a08a6 100755 --- a/MacOSX-Framework +++ b/MacOSX-Framework @@ -1,4 +1,25 @@ #!/bin/bash +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### # This script performs all of the steps needed to build a # universal binary libcurl.framework for Mac OS X 10.4 or greater. # @@ -82,7 +103,7 @@ MINVER64='-mmacosx-version-min='$MACVER64 if test ! -z $SDK32; then echo "----Configuring libcurl for 32 bit universal framework..." make clean - ./configure --disable-dependency-tracking --disable-static --with-gssapi --with-darwinssl \ + ./configure --disable-dependency-tracking --disable-static --with-gssapi --with-secure-transport \ CFLAGS="-Os -isysroot $SDK32_DIR $ARCHES32" \ LDFLAGS="-Wl,-syslibroot,$SDK32_DIR $ARCHES32 -Wl,-headerpad_max_install_names" \ CC=$CC @@ -111,7 +132,7 @@ if test ! -z $SDK32; then popd make clean echo "----Configuring libcurl for 64 bit universal framework..." - ./configure --disable-dependency-tracking --disable-static --with-gssapi --with-darwinssl \ + ./configure --disable-dependency-tracking --disable-static --with-gssapi --with-secure-transport \ CFLAGS="-Os -isysroot $SDK64_DIR $ARCHES64" \ LDFLAGS="-Wl,-syslibroot,$SDK64_DIR $ARCHES64 -Wl,-headerpad_max_install_names" \ CC=$CC diff --git a/Makefile.am b/Makefile.am index ac5eca3bac1e89..1a6d7f997f5689 100644 --- a/Makefile.am +++ b/Makefile.am @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2018, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. 
The terms @@ -24,14 +24,30 @@ AUTOMAKE_OPTIONS = foreign ACLOCAL_AMFLAGS = -I m4 -CMAKE_DIST = CMakeLists.txt CMake/CMakeConfigurableFile.in \ - CMake/CurlTests.c CMake/FindGSS.cmake CMake/OtherTests.cmake \ - CMake/Platforms/WindowsCache.cmake CMake/Utilities.cmake \ - CMake/Macros.cmake \ - CMake/CurlSymbolHiding.cmake CMake/FindCARES.cmake \ - CMake/FindLibSSH2.cmake CMake/FindNGHTTP2.cmake \ - CMake/FindMbedTLS.cmake CMake/cmake_uninstall.cmake.in \ - CMake/curl-config.cmake.in +CMAKE_DIST = \ + CMake/cmake_uninstall.cmake.in \ + CMake/CMakeConfigurableFile.in \ + CMake/curl-config.cmake.in \ + CMake/CurlSymbolHiding.cmake \ + CMake/CurlTests.c \ + CMake/FindBearSSL.cmake \ + CMake/FindBrotli.cmake \ + CMake/FindCARES.cmake \ + CMake/FindGSS.cmake \ + CMake/FindLibSSH2.cmake \ + CMake/FindMbedTLS.cmake \ + CMake/FindNGHTTP2.cmake \ + CMake/FindNGHTTP3.cmake \ + CMake/FindNGTCP2.cmake \ + CMake/FindNSS.cmake \ + CMake/FindQUICHE.cmake \ + CMake/FindWolfSSL.cmake \ + CMake/FindZstd.cmake \ + CMake/Macros.cmake \ + CMake/OtherTests.cmake \ + CMake/Platforms/WindowsCache.cmake \ + CMake/Utilities.cmake \ + CMakeLists.txt VC6_LIBTMPL = projects/Windows/VC6/lib/libcurl.tmpl VC6_LIBDSP = projects/Windows/VC6/lib/libcurl.dsp.dist @@ -151,12 +167,23 @@ VC_DIST = projects/README \ projects/wolfssl_options.h \ projects/wolfssl_override.props -WINBUILD_DIST = winbuild/BUILD.WINDOWS.txt winbuild/gen_resp_file.bat \ +WINBUILD_DIST = winbuild/README.md winbuild/gen_resp_file.bat \ winbuild/MakefileBuild.vc winbuild/Makefile.vc +PLAN9_DIST = plan9/include/mkfile \ + plan9/include/mkfile \ + plan9/mkfile.proto \ + plan9/mkfile \ + plan9/README \ + plan9/lib/mkfile.inc \ + plan9/lib/mkfile \ + plan9/src/mkfile.inc \ + plan9/src/mkfile + EXTRA_DIST = CHANGES COPYING maketgz Makefile.dist curl-config.in \ RELEASE-NOTES buildconf libcurl.pc.in MacOSX-Framework \ - scripts/updatemanpages.pl $(CMAKE_DIST) $(VC_DIST) $(WINBUILD_DIST) \ + scripts/updatemanpages.pl $(CMAKE_DIST) \ + $(VC_DIST) $(WINBUILD_DIST) $(PLAN9_DIST) \ lib/libcurl.vers.in buildconf.bat scripts/coverage.sh scripts/completion.pl CLEANFILES = $(VC6_LIBDSP) $(VC6_SRCDSP) $(VC7_LIBVCPROJ) $(VC7_SRCVCPROJ) \ @@ -278,15 +305,15 @@ cygwinbin: # We extend the standard install with a custom hook: install-data-hook: - cd include && $(MAKE) install - cd docs && $(MAKE) install - cd docs/libcurl && $(MAKE) install + (cd include && $(MAKE) install) + (cd docs && $(MAKE) install) + (cd docs/libcurl && $(MAKE) install) # We extend the standard uninstall with a custom hook: uninstall-hook: - cd include && $(MAKE) uninstall - cd docs && $(MAKE) uninstall - cd docs/libcurl && $(MAKE) uninstall + (cd include && $(MAKE) uninstall) + (cd docs && $(MAKE) uninstall) + (cd docs/libcurl && $(MAKE) uninstall) ca-bundle: lib/mk-ca-bundle.pl @echo "generating a fresh ca-bundle.crt" @@ -297,11 +324,11 @@ ca-firefox: lib/firefox-db2pem.sh ./lib/firefox-db2pem.sh lib/ca-bundle.crt checksrc: - cd lib && $(MAKE) checksrc - cd src && $(MAKE) checksrc - cd tests && $(MAKE) checksrc - cd include/curl && $(MAKE) checksrc - cd docs/examples && $(MAKE) checksrc + (cd lib && $(MAKE) checksrc) + (cd src && $(MAKE) checksrc) + (cd tests && $(MAKE) checksrc) + (cd include/curl && $(MAKE) checksrc) + (cd docs/examples && $(MAKE) checksrc) .PHONY: vc-ide @@ -317,6 +344,10 @@ vc-ide: $(VC6_LIBDSP_DEPS) $(VC6_SRCDSP_DEPS) $(VC7_LIBVCPROJ_DEPS) \ win32_lib_rc='$(LIB_RCFILES)'; \ win32_lib_vauth_srcs='$(LIB_VAUTH_CFILES)'; \ win32_lib_vauth_hdrs='$(LIB_VAUTH_HFILES)'; \ + 
win32_lib_vquic_srcs='$(LIB_VQUIC_CFILES)'; \ + win32_lib_vquic_hdrs='$(LIB_VQUIC_HFILES)'; \ + win32_lib_vssh_srcs='$(LIB_VSSH_CFILES)'; \ + win32_lib_vssh_hdrs='$(LIB_VSSH_HFILES)'; \ win32_lib_vtls_srcs='$(LIB_VTLS_CFILES)'; \ win32_lib_vtls_hdrs='$(LIB_VTLS_HFILES)'; \ win32_src_srcs='$(CURL_CFILES)'; \ @@ -329,6 +360,10 @@ vc-ide: $(VC6_LIBDSP_DEPS) $(VC6_SRCDSP_DEPS) $(VC7_LIBVCPROJ_DEPS) \ sorted_lib_hdrs=`for file in $$win32_lib_hdrs; do echo $$file; done | sort`; \ sorted_lib_vauth_srcs=`for file in $$win32_lib_vauth_srcs; do echo $$file; done | sort`; \ sorted_lib_vauth_hdrs=`for file in $$win32_lib_vauth_hdrs; do echo $$file; done | sort`; \ + sorted_lib_vquic_srcs=`for file in $$win32_lib_vquic_srcs; do echo $$file; done | sort`; \ + sorted_lib_vquic_hdrs=`for file in $$win32_lib_vquic_hdrs; do echo $$file; done | sort`; \ + sorted_lib_vssh_srcs=`for file in $$win32_lib_vssh_srcs; do echo $$file; done | sort`; \ + sorted_lib_vssh_hdrs=`for file in $$win32_lib_vssh_hdrs; do echo $$file; done | sort`; \ sorted_lib_vtls_srcs=`for file in $$win32_lib_vtls_srcs; do echo $$file; done | sort`; \ sorted_lib_vtls_hdrs=`for file in $$win32_lib_vtls_hdrs; do echo $$file; done | sort`; \ sorted_src_srcs=`for file in $$win32_src_srcs; do echo $$file; done | sort`; \ @@ -340,10 +375,15 @@ vc-ide: $(VC6_LIBDSP_DEPS) $(VC6_SRCDSP_DEPS) $(VC7_LIBVCPROJ_DEPS) \ function gen_element(type, dir, file)\ {\ sub(/vauth\//, "", file);\ + sub(/vquic\//, "", file);\ + sub(/vssh\//, "", file);\ sub(/vtls\//, "", file);\ \ spaces=" ";\ - if(dir == "lib\\vauth" || dir == "lib\\vtls")\ + if(dir == "lib\\vauth" ||\ + dir == "lib\\vquic" ||\ + dir == "lib\\vssh" ||\ + dir == "lib\\vtls")\ tabs=" ";\ else\ tabs=" ";\ @@ -405,6 +445,22 @@ function gen_element(type, dir, file)\ split(lib_vauth_hdrs, arr);\ for(val in arr) gen_element(proj_type, "lib\\vauth", arr[val]);\ }\ + else if($$0 == "CURL_LIB_VQUIC_C_FILES") {\ + split(lib_vquic_srcs, arr);\ + for(val in arr) gen_element(proj_type, "lib\\vquic", arr[val]);\ + }\ + else if($$0 == "CURL_LIB_VQUIC_H_FILES") {\ + split(lib_vquic_hdrs, arr);\ + for(val in arr) gen_element(proj_type, "lib\\vquic", arr[val]);\ + }\ + else if($$0 == "CURL_LIB_VSSH_C_FILES") {\ + split(lib_vssh_srcs, arr);\ + for(val in arr) gen_element(proj_type, "lib\\vssh", arr[val]);\ + }\ + else if($$0 == "CURL_LIB_VSSH_H_FILES") {\ + split(lib_vssh_hdrs, arr);\ + for(val in arr) gen_element(proj_type, "lib\\vssh", arr[val]);\ + }\ else if($$0 == "CURL_LIB_VTLS_C_FILES") {\ split(lib_vtls_srcs, arr);\ for(val in arr) gen_element(proj_type, "lib\\vtls", arr[val]);\ @@ -450,6 +506,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC6_LIBTMPL) > $(VC6_LIBDSP) || { exit 1; }; \ @@ -470,6 +530,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v 
lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC7_LIBTMPL) > $(VC7_LIBVCPROJ) || { exit 1; }; \ @@ -490,6 +554,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC71_LIBTMPL) > $(VC71_LIBVCPROJ) || { exit 1; }; \ @@ -510,6 +578,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC8_LIBTMPL) > $(VC8_LIBVCPROJ) || { exit 1; }; \ @@ -530,6 +602,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC9_LIBTMPL) > $(VC9_LIBVCPROJ) || { exit 1; }; \ @@ -550,6 +626,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC10_LIBTMPL) > $(VC10_LIBVCXPROJ) || { exit 1; }; \ @@ -570,6 +650,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC11_LIBTMPL) > $(VC11_LIBVCXPROJ) || { exit 1; }; \ @@ -590,6 +674,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC12_LIBTMPL) > $(VC12_LIBVCXPROJ) || { exit 1; }; \ @@ -610,6 +698,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v 
lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC14_LIBTMPL) > $(VC14_LIBVCXPROJ) || { exit 1; }; \ @@ -630,6 +722,10 @@ function gen_element(type, dir, file)\ -v lib_rc="$$win32_lib_rc" \ -v lib_vauth_srcs="$$sorted_lib_vauth_srcs" \ -v lib_vauth_hdrs="$$sorted_lib_vauth_hdrs" \ + -v lib_vquic_srcs="$$sorted_lib_vquic_srcs" \ + -v lib_vquic_hdrs="$$sorted_lib_vquic_hdrs" \ + -v lib_vssh_srcs="$$sorted_lib_vssh_srcs" \ + -v lib_vssh_hdrs="$$sorted_lib_vssh_hdrs" \ -v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \ -v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \ "$$awk_code" $(srcdir)/$(VC15_LIBTMPL) > $(VC15_LIBVCXPROJ) || { exit 1; }; \ diff --git a/Makefile.dist b/Makefile.dist index a6316ab5febef1..8e084604ad22d8 100644 --- a/Makefile.dist +++ b/Makefile.dist @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2018, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms @@ -28,18 +28,6 @@ ssl: ./configure --with-ssl make -watcom: .SYMBOLIC - cd lib && $(MAKE) -u -f Makefile.Watcom - cd src && $(MAKE) -u -f Makefile.Watcom - -watcom-clean: .SYMBOLIC - cd lib && $(MAKE) -u -f Makefile.Watcom clean - cd src && $(MAKE) -u -f Makefile.Watcom clean - -watcom-vclean: .SYMBOLIC - cd lib && $(MAKE) -u -f Makefile.Watcom vclean - cd src && $(MAKE) -u -f Makefile.Watcom vclean - mingw32: $(MAKE) -C lib -f Makefile.m32 $(MAKE) -C src -f Makefile.m32 diff --git a/README b/README index f0b3b93932f608..f337f351114e9f 100644 --- a/README +++ b/README @@ -17,9 +17,9 @@ README You find answers to the most frequent questions we get in the FAQ document. - Study the COPYING file for distribution terms and similar. If you distribute - curl binaries or other binaries that involve libcurl, you might enjoy the - LICENSE-MIXING document. + Study the COPYING file for distribution terms. + + Those documents and more can be found in the docs/ directory. CONTACT @@ -28,9 +28,9 @@ CONTACT All contributors to the project are listed in the THANKS document. -WEB SITE +WEBSITE - Visit the curl web site for the latest news and downloads: + Visit the curl website for the latest news and downloads: https://curl.haxx.se/ @@ -42,6 +42,12 @@ GIT (you'll get a directory named curl created, filled with the source code) +SECURITY PROBLEMS + + Report suspected security problems via our HackerOne page and not in public! 
+ + https://hackerone.com/curl + NOTICE Curl contains pieces of source code that is Copyright (c) 1998, 1999 diff --git a/README.md b/README.md index 70764357fbc6d6..7642afd6806cc9 100644 --- a/README.md +++ b/README.md @@ -4,12 +4,13 @@ [![Coverity passed](https://scan.coverity.com/projects/curl/badge.svg)](https://scan.coverity.com/projects/curl) [![Travis-CI Build Status](https://travis-ci.org/curl/curl.svg?branch=master)](https://travis-ci.org/curl/curl) [![AppVeyor Build Status](https://ci.appveyor.com/api/projects/status/l1vv31029huhf4g4?svg=true)](https://ci.appveyor.com/project/curlorg/curl) +[![Azure DevOps Build Status](https://dev.azure.com/daniel0244/curl/_apis/build/status/curl.curl?branchName=master)](https://dev.azure.com/daniel0244/curl/_build/latest?definitionId=1&branchName=master) [![Cirrus Build Status](https://api.cirrus-ci.com/github/curl/curl.svg?branch=master)](https://cirrus-ci.com/github/curl/curl) -[![Coverage Status](https://coveralls.io/repos/github/curl/curl/badge.svg)](https://coveralls.io/github/curl/curl) [![Backers on Open Collective](https://opencollective.com/curl/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/curl/sponsors/badge.svg)](#sponsors) [![Language Grade: C/C++](https://img.shields.io/lgtm/grade/cpp/g/curl/curl.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/curl/curl/context:cpp) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/d11483a0cc5c4ebd9da4ff9f7cd56690)](https://www.codacy.com/app/curl/curl?utm_source=github.com&utm_medium=referral&utm_content=curl/curl&utm_campaign=Badge_Grade) +[![Fuzzing Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/curl.svg)](https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:curl) Curl is a command-line tool for transferring data specified with URL syntax. Find out how to use curl by reading [the curl.1 man @@ -21,13 +22,11 @@ libcurl is the library curl is using to do its job. It is readily available to be used by your software. Read [the libcurl.3 man page](https://curl.haxx.se/libcurl/c/libcurl.html) to learn how! -You find answers to the most frequent questions we get in [the FAQ +You can find answers to the most frequent questions we get in [the FAQ document](https://curl.haxx.se/docs/faq.html). Study [the COPYING file](https://curl.haxx.se/docs/copyright.html) for -distribution terms and similar. If you distribute curl binaries or other -binaries that involve libcurl, you might enjoy [the LICENSE-MIXING -document](https://curl.haxx.se/legal/licmix.html). +distribution terms. ## Contact @@ -39,7 +38,7 @@ document](https://curl.haxx.se/docs/thanks.html). ## Website -Visit the [curl web site](https://curl.haxx.se/) for the latest news and +Visit the [curl website](https://curl.haxx.se/) for the latest news and downloads. ## Git @@ -50,6 +49,11 @@ To download the very latest source from the Git server do this: (you'll get a directory named curl created, filled with the source code) +## Security problems + +Report suspected security problems via [our HackerOne +page](https://hackerone.com/curl) and not in public! 
+ ## Notice Curl contains pieces of source code that is Copyright (c) 1998, 1999 Kungliga diff --git a/RELEASE-NOTES b/RELEASE-NOTES index 50e7f3e1e31152..080534e045db74 100644 --- a/RELEASE-NOTES +++ b/RELEASE-NOTES @@ -1,54 +1,19 @@ -curl and libcurl 7.65.0 +curl and libcurl 7.73.1 - Public curl releases: 181 - Command line options: 221 - curl_easy_setopt() options: 267 - Public functions in libcurl: 80 - Contributors: 1929 + Public curl releases: 196 + Command line options: 234 + curl_easy_setopt() options: 278 + Public functions in libcurl: 85 + Contributors: 2271 This release includes the following changes: - o pipelining: removed [10] - o CURLOPT_DNS_USE_GLOBAL_CACHE: removed [25] + o This release includes the following bugfixes: - o --config: clarify that initial : and = might need quoting [17] - o AppVeyor: enable testing for WinSSL build [23] - o README.md: fix no-consecutive-blank-lines Codacy warning [22] - o VC15 project: remove MinimalRebuild - o VS projects: use Unicode for VC10+ [16] - o build-openssl.bat: lots of improvements and polish - o cirrus: Customize the disabled tests per FreeBSD version - o cmake: avoid linking executable for some tests with cmake 3.6+ [18] - o cmake: clear CMAKE_REQUIRED_LIBRARIES after each use [19] - o cmake: set SSL_BACKENDS [12] - o configure: avoid unportable `==' test(1) operator [1] - o configure: fix default location for fish completions [13] - o curl_easy_getinfo.3: fix minor formatting mistake - o documentation: Fix several typos [7] - o ftplistparser: fix LGTM alert "Empty block without comment" [14] - o lib509: add missing include for strdup [22] - o lib557: initialize variables [22] - o multi: improved HTTP_1_1_REQUIRED handling [2] - o polarssl_threadlock: remove conditionally unused code [22] - o resolve: apply Happy Eyeballs philosophy to parallel c-ares queries [3] - o scripts: fix typos - o smtp: fix compiler warning [15] - o socks5: user name and passwords must be shorter than 256 [8] - o socks: fix error message - o tests/server/util: fix Windows Unicode build [21] - o tests: make Impacket (SMB server) Python 3 compatible [11] - o tool_cb_wrt: fix bad-function-cast warning [5] - o tool_help: include for strcasecmp [4] - o transfer: fix LGTM alert "Comparison is always true" [14] - o travis: allow builds on branches named "ci" - o travis: install dependencies only when needed [24] - o url: always clone the CUROPT_CURLU handle [26] - o urlapi: urlencode characters above 0x7f correctly [9] - o vauth/cleartext: update the PLAIN login to match RFC 4616 [27] - o vauth/oauth2: Fix OAUTHBEARER token generation [6] - o xattr: skip unittest on unsupported platforms [20] + o src/tool_filetime: disable -Wformat on mingw for this file [2] + o test122[12]: remove these two tests [1] This release includes the following known bugs: @@ -57,41 +22,12 @@ This release includes the following known bugs: This release would not have looked like this without help, code, reports and advice from friends like these: - Brad Spencer, cclauss on github, Dan Fandrich, Daniel Gustafsson, - Daniel Stenberg, Eli Schwartz, Even Rouault, Jakub Zakrzewski, - Leonardo Taccari, Marcel Raad, Mert Yazıcıoğlu, niner on github, - Paolo Mossino, Poul T Lomholt, Rikard Falkeborn, Simon Warta, Steve Holme, - Tim Rühsen, Wyatt O'Day, XmiliaH on github, - (20 contributors) + b9a1 on github, Daniel Stenberg, Marc Hörsken, + (3 contributors) Thanks! 
(and sorry if I forgot to mention someone) References to bug reports and discussions on issues: - [1] = https://curl.haxx.se/bug/?i=3709 - [2] = https://curl.haxx.se/bug/?i=3707 - [3] = https://curl.haxx.se/bug/?i=3699 - [4] = https://curl.haxx.se/bug/?i=3715 - [5] = https://curl.haxx.se/bug/?i=3718 - [6] = https://curl.haxx.se/bug/?i=2487 - [7] = https://curl.haxx.se/bug/?i=3724 - [8] = https://curl.haxx.se/bug/?i=3737 - [9] = https://curl.haxx.se/bug/?i=3741 - [10] = https://curl.haxx.se/bug/?i=3651 - [11] = https://curl.haxx.se/bug/?i=3731 - [12] = https://curl.haxx.se/bug/?i=3736 - [13] = https://curl.haxx.se/bug/?i=3723 - [14] = https://curl.haxx.se/bug/?i=3732 - [15] = https://curl.haxx.se/bug/?i=3729 - [16] = https://curl.haxx.se/bug/?i=3720 - [17] = https://curl.haxx.se/bug/?i=3738 - [18] = https://curl.haxx.se/bug/?i=3744 - [19] = https://curl.haxx.se/bug/?i=3743 - [20] = https://curl.haxx.se/bug/?i=3759 - [21] = https://curl.haxx.se/bug/?i=3758 - [22] = https://curl.haxx.se/bug/?i=3739 - [23] = https://curl.haxx.se/bug/?i=3725 - [24] = https://curl.haxx.se/bug/?i=3721 - [25] = https://curl.haxx.se/bug/?i=3654 - [26] = https://curl.haxx.se/bug/?i=3753 - [27] = https://curl.haxx.se/bug/?i=3757 + [1] = https://curl.haxx.se/bug/?i=6080 + [2] = https://curl.haxx.se/bug/?i=6079 diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000000..4e84fbefc25d60 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,10 @@ +# Security Policy + +See [docs/SECURITY-PROCESS.md](docs/SECURITY-PROCESS.md) for full details. + +## Reporting a Vulnerability + +If you have found or just suspect a security problem somewhere in curl or libcurl, +report it on [https://hackerone.com/curl](https://hackerone.com/curl). + +We treat security issues with confidentiality until controlled and disclosed responsibly. diff --git a/acinclude.m4 b/acinclude.m4 index 24dad39141ec0b..e7a36e4bd676af 100644 --- a/acinclude.m4 +++ b/acinclude.m4 @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2018, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms @@ -661,7 +661,7 @@ dnl ------------------------------------------------- dnl Check for libraries needed for WINLDAP support, dnl and prepended to LIBS any needed libraries. dnl This macro can take an optional parameter with a -dnl white space separated list of libraries to check +dnl whitespace separated list of libraries to check dnl before the WINLDAP default ones. AC_DEFUN([CURL_CHECK_LIBS_WINLDAP], [ @@ -755,7 +755,7 @@ dnl ------------------------------------------------- dnl Check for libraries needed for LDAP support, dnl and prepended to LIBS any needed libraries. dnl This macro can take an optional parameter with a -dnl white space separated list of libraries to check +dnl whitespace separated list of libraries to check dnl before the default ones. 
AC_DEFUN([CURL_CHECK_LIBS_LDAP], [ @@ -1887,6 +1887,8 @@ struct Library *SocketBase = NULL; #endif #ifdef HAVE_SYS_SELECT_H #include <sys/select.h> +#elif defined(HAVE_UNISTD_H) +#include <unistd.h> #endif #ifdef HAVE_SYS_SOCKET_H #include <sys/socket.h> @@ -1950,6 +1952,8 @@ struct Library *SocketBase = NULL; #endif #ifdef HAVE_SYS_SELECT_H #include <sys/select.h> +#elif defined(HAVE_UNISTD_H) +#include <unistd.h> #endif #ifdef HAVE_SYS_SOCKET_H #include <sys/socket.h> @@ -2174,8 +2178,8 @@ AC_HELP_STRING([--without-ca-bundle], [Don't use a default CA bundle]), AC_ARG_WITH(ca-path, AC_HELP_STRING([--with-ca-path=DIRECTORY], [Path to a directory containing CA certificates stored individually, with \ -their filenames in a hash format. This option can be used with OpenSSL, \ -GnuTLS and PolarSSL backends. Refer to OpenSSL c_rehash for details. \ +their filenames in a hash format. This option can be used with the OpenSSL, \ +GnuTLS and mbedTLS backends. Refer to OpenSSL c_rehash for details. \ (example: /etc/certificates)]) AC_HELP_STRING([--without-ca-path], [Don't use a default CA path]), [ @@ -2201,8 +2205,8 @@ AC_HELP_STRING([--without-ca-path], [Don't use a default CA path]), capath="no" elif test "x$want_capath" != "xno" -a "x$want_capath" != "xunset"; then dnl --with-ca-path given - if test "x$OPENSSL_ENABLED" != "x1" -a "x$GNUTLS_ENABLED" != "x1" -a "x$POLARSSL_ENABLED" != "x1"; then - AC_MSG_ERROR([--with-ca-path only works with OpenSSL, GnuTLS or PolarSSL]) + if test "x$OPENSSL_ENABLED" != "x1" -a "x$GNUTLS_ENABLED" != "x1" -a "x$MBEDTLS_ENABLED" != "x1"; then - AC_MSG_ERROR([--with-ca-path only works with OpenSSL, GnuTLS or mbedTLS]) fi capath="$want_capath" ca="no" diff --git a/appveyor.yml b/appveyor.yml index d742493d242cd4..beaa56d7132d40 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,89 +1,248 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied.
+# +########################################################################### + version: 7.50.0.{build} environment: matrix: + # generated CMake-based Visual Studio Release builds - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: CMake PRJ_GEN: "Visual Studio 9 2008" PRJ_CFG: Release OPENSSL: OFF - WINSSL: ON + SCHANNEL: ON + ENABLE_UNICODE: OFF HTTP_ONLY: OFF TESTING: OFF SHARED: ON DISABLED_TESTS: "" COMPILER_PATH: "" - - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" - PRJ_GEN: "Visual Studio 15 2017 Win64" - PRJ_CFG: Debug - OPENSSL: OFF - WINSSL: ON + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: CMake + PRJ_GEN: "Visual Studio 16 2019" + TARGET: "-A x64" + PRJ_CFG: Release + OPENSSL: ON + SCHANNEL: OFF + ENABLE_UNICODE: OFF HTTP_ONLY: OFF - TESTING: ON - SHARED: OFF + TESTING: OFF + SHARED: ON DISABLED_TESTS: "" COMPILER_PATH: "" - - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" - PRJ_GEN: "Visual Studio 15 2017 Win64" + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: CMake + PRJ_GEN: "Visual Studio 16 2019" + TARGET: "-A ARM64" PRJ_CFG: Release - OPENSSL: ON - WINSSL: OFF + OPENSSL: OFF + SCHANNEL: ON + ENABLE_UNICODE: OFF HTTP_ONLY: OFF TESTING: OFF - SHARED: ON + SHARED: OFF DISABLED_TESTS: "" COMPILER_PATH: "" + # generated CMake-based Visual Studio Debug builds - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: CMake PRJ_GEN: "Visual Studio 10 2010 Win64" PRJ_CFG: Debug OPENSSL: OFF - WINSSL: OFF + SCHANNEL: OFF + ENABLE_UNICODE: OFF HTTP_ONLY: OFF TESTING: ON SHARED: OFF - DISABLED_TESTS: "" + DISABLED_TESTS: "!1139 !1501" COMPILER_PATH: "" - - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" - PRJ_GEN: "Visual Studio 15 2017 Win64" + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: CMake + PRJ_GEN: "Visual Studio 16 2019" + TARGET: "-A x64" PRJ_CFG: Debug OPENSSL: OFF - WINSSL: OFF + SCHANNEL: ON + ENABLE_UNICODE: ON HTTP_ONLY: OFF TESTING: ON SHARED: OFF - DISABLED_TESTS: "" + DISABLED_TESTS: "!1139 !1501" COMPILER_PATH: "" - - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" - PRJ_GEN: "Visual Studio 15 2017 Win64" + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: CMake + PRJ_GEN: "Visual Studio 16 2019" + TARGET: "-A x64" PRJ_CFG: Debug OPENSSL: OFF - WINSSL: OFF + SCHANNEL: OFF + ENABLE_UNICODE: OFF + HTTP_ONLY: OFF + TESTING: ON + SHARED: OFF + DISABLED_TESTS: "!1139 !1501" + COMPILER_PATH: "" + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: CMake + PRJ_GEN: "Visual Studio 16 2019" + TARGET: "-A x64" + PRJ_CFG: Debug + OPENSSL: OFF + SCHANNEL: OFF + ENABLE_UNICODE: OFF HTTP_ONLY: ON TESTING: ON SHARED: OFF - DISABLED_TESTS: "" + DISABLED_TESTS: "!1139 !1501" COMPILER_PATH: "" + # generated CMake-based MSYS Makefiles builds (mingw cross-compiling) - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: CMake PRJ_GEN: "MSYS Makefiles" PRJ_CFG: Debug OPENSSL: OFF - WINSSL: OFF + SCHANNEL: ON + ENABLE_UNICODE: ON HTTP_ONLY: OFF TESTING: ON SHARED: OFF - DISABLED_TESTS: "!198" + DISABLED_TESTS: "!1139 !1501" COMPILER_PATH: "C:\\mingw-w64\\x86_64-8.1.0-posix-seh-rt_v6-rev0\\mingw64\\bin" MSYS2_ARG_CONV_EXCL: "/*" + BUILD_OPT: -k + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" + BUILD_SYSTEM: CMake + PRJ_GEN: "MSYS Makefiles" + PRJ_CFG: Debug + OPENSSL: OFF + SCHANNEL: ON + ENABLE_UNICODE: ON + HTTP_ONLY: OFF + TESTING: ON + SHARED: OFF + DISABLED_TESTS: "!1139 !1501" + COMPILER_PATH: 
"C:\\mingw-w64\\x86_64-7.2.0-posix-seh-rt_v5-rev1\\mingw64\\bin" + MSYS2_ARG_CONV_EXCL: "/*" + BUILD_OPT: -k - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: CMake PRJ_GEN: "MSYS Makefiles" PRJ_CFG: Debug OPENSSL: OFF - WINSSL: OFF + SCHANNEL: ON + ENABLE_UNICODE: OFF HTTP_ONLY: OFF TESTING: ON SHARED: OFF - DISABLED_TESTS: "" + DISABLED_TESTS: "!1139 !1501" + COMPILER_PATH: "C:\\mingw-w64\\i686-6.3.0-posix-dwarf-rt_v5-rev1\\mingw32\\bin" + MSYS2_ARG_CONV_EXCL: "/*" + BUILD_OPT: -k + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: CMake + PRJ_GEN: "MSYS Makefiles" + PRJ_CFG: Debug + OPENSSL: OFF + SCHANNEL: OFF + ENABLE_UNICODE: OFF + HTTP_ONLY: OFF + TESTING: ON + SHARED: OFF + DISABLED_TESTS: "!1139 !1501" COMPILER_PATH: "C:\\MinGW\\bin" MSYS2_ARG_CONV_EXCL: "/*" + BUILD_OPT: -k + # winbuild-based builds + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: winbuild_vs2015 + DEBUG: yes + PATHPART: debug + TESTING: OFF + ENABLE_UNICODE: no + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: winbuild_vs2015 + DEBUG: no + PATHPART: release + TESTING: OFF + ENABLE_UNICODE: no + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" + BUILD_SYSTEM: winbuild_vs2017 + DEBUG: yes + PATHPART: debug + TESTING: OFF + ENABLE_UNICODE: no + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" + BUILD_SYSTEM: winbuild_vs2017 + DEBUG: no + PATHPART: release + TESTING: OFF + ENABLE_UNICODE: no + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: winbuild_vs2015 + DEBUG: yes + PATHPART: debug + TESTING: OFF + ENABLE_UNICODE: yes + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: winbuild_vs2015 + DEBUG: no + PATHPART: release + TESTING: OFF + ENABLE_UNICODE: yes + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" + BUILD_SYSTEM: winbuild_vs2017 + DEBUG: yes + PATHPART: debug + TESTING: OFF + ENABLE_UNICODE: yes + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" + BUILD_SYSTEM: winbuild_vs2017 + DEBUG: no + PATHPART: release + TESTING: OFF + ENABLE_UNICODE: yes + # generated VisualStudioSolution-based builds + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2017" + BUILD_SYSTEM: VisualStudioSolution + PRJ_CFG: "DLL Debug - DLL Windows SSPI - DLL WinIDN" + TESTING: OFF + VC_VERSION: VC15 + # autotools-based builds (NOT mingw cross-compiling, but msys2 native) + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2015" + BUILD_SYSTEM: autotools + TESTING: ON + DISABLED_TESTS: "!19 ~1056 !1233" + CONFIG_ARGS: "--enable-debug --enable-werror --enable-alt-svc --disable-threaded-resolver --disable-proxy" + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: autotools + TESTING: ON + DISABLED_TESTS: "!19 !504 !704 !705 ~1056 !1233" + CONFIG_ARGS: "--enable-debug --enable-werror --enable-alt-svc --disable-threaded-resolver" + - APPVEYOR_BUILD_WORKER_IMAGE: "Visual Studio 2019" + BUILD_SYSTEM: autotools + TESTING: ON + DISABLED_TESTS: "!19 !504 !704 !705 ~1056 !1233" + CONFIG_ARGS: "--enable-warnings --enable-werror" install: - set "PATH=C:\msys64\usr\bin;%PATH%" @@ -91,26 +250,69 @@ install: set "PATH=%COMPILER_PATH%;%PATH%" ) build_script: - - cmake . 
- -G"%PRJ_GEN%" - -DCMAKE_USE_OPENSSL=%OPENSSL% - -DCMAKE_USE_WINSSL=%WINSSL% - -DHTTP_ONLY=%HTTP_ONLY% - -DBUILD_SHARED_LIBS=%SHARED% - -DBUILD_TESTING=%TESTING% - -DCURL_WERROR=ON - -DENABLE_DEBUG=ON - -DCMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE="" - -DCMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG="" - -DCMAKE_INSTALL_PREFIX="C:/CURL" - -DCMAKE_BUILD_TYPE=%PRJ_CFG% - - cmake --build . --config %PRJ_CFG% --clean-first + - if %BUILD_SYSTEM%==CMake ( + cmake . + -G"%PRJ_GEN%" + %TARGET% + -DCMAKE_USE_OPENSSL=%OPENSSL% + -DCMAKE_USE_SCHANNEL=%SCHANNEL% + -DHTTP_ONLY=%HTTP_ONLY% + -DBUILD_SHARED_LIBS=%SHARED% + -DBUILD_TESTING=%TESTING% + -DCURL_WERROR=ON + -DENABLE_DEBUG=ON + -DENABLE_UNICODE=%ENABLE_UNICODE% + -DCMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE="" + -DCMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG="" + -DCMAKE_INSTALL_PREFIX="C:/CURL" + -DCMAKE_BUILD_TYPE=%PRJ_CFG% && + cmake --build . --config %PRJ_CFG% --parallel 2 --clean-first -- %BUILD_OPT% + ) else ( + if %BUILD_SYSTEM%==VisualStudioSolution ( + cd projects && + .\\generate.bat %VC_VERSION% && + msbuild.exe /p:Configuration="%PRJ_CFG%" "Windows\\%VC_VERSION%\\curl-all.sln" + ) else ( + if %BUILD_SYSTEM%==winbuild_vs2015 ( + call buildconf.bat && + cd winbuild && + call "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 && + call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86_amd64 && + nmake /f Makefile.vc mode=dll VC=14 "SSL_PATH=C:\OpenSSL-v111-Win64" WITH_SSL=dll MACHINE=x64 DEBUG=%DEBUG% ENABLE_UNICODE=%ENABLE_UNICODE% && + ..\builds\libcurl-vc14-x64-%PATHPART%-dll-ssl-dll-ipv6-sspi\bin\curl.exe -V + ) else ( + if %BUILD_SYSTEM%==winbuild_vs2017 ( + call buildconf.bat && + cd winbuild && + call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvars64.bat" && + nmake /f Makefile.vc mode=dll VC=15 "SSL_PATH=C:\OpenSSL-v111-Win64" WITH_SSL=dll MACHINE=x64 DEBUG=%DEBUG% ENABLE_UNICODE=%ENABLE_UNICODE% && + ..\builds\libcurl-vc15-x64-%PATHPART%-dll-ssl-dll-ipv6-sspi\bin\curl.exe -V + ) else ( + if %BUILD_SYSTEM%==autotools ( + bash.exe -e -l -c "cd /c/projects/curl && ./buildconf && ./configure %CONFIG_ARGS% && make && make examples && cd tests && make" + ))))) + - if %TESTING%==ON ( + if %BUILD_SYSTEM%==CMake ( + cmake --build . --config %PRJ_CFG% --parallel 2 --target testdeps + )) test_script: - if %TESTING%==ON ( - bash.exe -e -l -c "cd /c/projects/curl/tests && ./runtests.pl -a -p !flaky !1139 %DISABLED_TESTS%" ) + if %BUILD_SYSTEM%==CMake ( + set TFLAGS=%DISABLED_TESTS% && + cmake --build . --config %PRJ_CFG% --target test-nonflaky + ) else ( + echo APPVEYOR_API_URL=%APPVEYOR_API_URL% && + bash.exe -e -l -c "cd /c/projects/curl/tests && ./runtests.pl -a -b$(($(echo '%APPVEYOR_API_URL%' | cut -d'/' -f3 | cut -d':' -f2)+1)) -p !flaky %DISABLED_TESTS%" )) -# whitelist branches to avoid testing feature branches twice (as branch and as pull request) +# select branches to avoid testing feature branches twice (as branch and as pull request) branches: only: - master + - /\/ci$/ + +artifacts: + - path: '**/curl.exe' + name: curl + - path: '**/*curl*.dll' + name: libcurl diff --git a/buildconf b/buildconf index 509575312951eb..4e4c17e9991980 100755 --- a/buildconf +++ b/buildconf @@ -1,448 +1,4 @@ #!/bin/sh -#*************************************************************************** -# _ _ ____ _ -# Project ___| | | | _ \| | -# / __| | | | |_) | | -# | (__| |_| | _ <| |___ -# \___|\___/|_| \_\_____| -# -# Copyright (C) 1998 - 2017, Daniel Stenberg, , et al. 
-# -# This software is licensed as described in the file COPYING, which -# you should have received as part of this distribution. The terms -# are also available at https://curl.haxx.se/docs/copyright.html. -# -# You may opt to use, copy, modify, merge, publish, distribute and/or sell -# copies of the Software, and permit persons to whom the Software is -# furnished to do so, under the terms of the COPYING file. -# -# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY -# KIND, either express or implied. -# -########################################################################### -#-------------------------------------------------------------------------- -# die prints argument string to stdout and exits this shell script. -# -die(){ - echo "buildconf: $@" - exit 1 -} - -#-------------------------------------------------------------------------- -# findtool works as 'which' but we use a different name to make it more -# obvious we aren't using 'which'! ;-) -# Unlike 'which' does, the current directory is ignored. -# -findtool(){ - file="$1" - - if { echo "$file" | grep "/" >/dev/null 2>&1; } then - # when file is given with a path check it first - if test -f "$file"; then - echo "$file" - return - fi - fi - - old_IFS=$IFS; IFS=':' - for path in $PATH - do - IFS=$old_IFS - # echo "checks for $file in $path" >&2 - if test "$path" -a "$path" != '.' -a -f "$path/$file"; then - echo "$path/$file" - return - fi - done - IFS=$old_IFS -} - -#-------------------------------------------------------------------------- -# removethis() removes all files and subdirectories with the given name, -# inside and below the current subdirectory at invocation time. -# -removethis(){ - if test "$#" = "1"; then - find . -depth -name $1 -print > buildconf.tmp.$$ - while read fdname - do - if test -f "$fdname"; then - rm -f "$fdname" - elif test -d "$fdname"; then - rm -f -r "$fdname" - fi - done < buildconf.tmp.$$ - rm -f buildconf.tmp.$$ - fi -} - -#-------------------------------------------------------------------------- -# Ensure that buildconf runs from the subdirectory where configure.ac lives -# -if test ! -f configure.ac || - test ! -f src/tool_main.c || - test ! -f lib/urldata.h || - test ! -f include/curl/curl.h || - test ! -f m4/curl-functions.m4; then - echo "Can not run buildconf from outside of curl's source subdirectory!" - echo "Change to the subdirectory where buildconf is found, and try again." - exit 1 -fi - -#-------------------------------------------------------------------------- -# autoconf 2.57 or newer. Unpatched version 2.67 does not generate proper -# configure script. Unpatched version 2.68 is simply unusable, we should -# disallow 2.68 usage. -# -need_autoconf="2.57" -ac_version=`${AUTOCONF:-autoconf} --version 2>/dev/null|head -n 1| sed -e 's/^[^0-9]*//' -e 's/[a-z]* *$//'` -if test -z "$ac_version"; then - echo "buildconf: autoconf not found." - echo " You need autoconf version $need_autoconf or newer installed." - exit 1 -fi -old_IFS=$IFS; IFS='.'; set $ac_version; IFS=$old_IFS -if test "$1" = "2" -a "$2" -lt "57" || test "$1" -lt "2"; then - echo "buildconf: autoconf version $ac_version found." - echo " You need autoconf version $need_autoconf or newer installed." - echo " If you have a sufficient autoconf installed, but it" - echo " is not named 'autoconf', then try setting the" - echo " AUTOCONF environment variable." 
- exit 1 -fi - -if test "$1" = "2" -a "$2" -eq "67"; then - echo "buildconf: autoconf version $ac_version (BAD)" - echo " Unpatched version generates broken configure script." -elif test "$1" = "2" -a "$2" -eq "68"; then - echo "buildconf: autoconf version $ac_version (BAD)" - echo " Unpatched version generates unusable configure script." -else - echo "buildconf: autoconf version $ac_version (ok)" -fi - -am4te_version=`${AUTOM4TE:-autom4te} --version 2>/dev/null|head -n 1| sed -e 's/autom4te\(.*\)/\1/' -e 's/^[^0-9]*//' -e 's/[a-z]* *$//'` -if test -z "$am4te_version"; then - echo "buildconf: autom4te not found. Weird autoconf installation!" - exit 1 -fi -if test "$am4te_version" = "$ac_version"; then - echo "buildconf: autom4te version $am4te_version (ok)" -else - echo "buildconf: autom4te version $am4te_version (ERROR: does not match autoconf version)" - exit 1 -fi - -#-------------------------------------------------------------------------- -# autoheader 2.50 or newer -# -ah_version=`${AUTOHEADER:-autoheader} --version 2>/dev/null|head -n 1| sed -e 's/^[^0-9]*//' -e 's/[a-z]* *$//'` -if test -z "$ah_version"; then - echo "buildconf: autoheader not found." - echo " You need autoheader version 2.50 or newer installed." - exit 1 -fi -old_IFS=$IFS; IFS='.'; set $ah_version; IFS=$old_IFS -if test "$1" = "2" -a "$2" -lt "50" || test "$1" -lt "2"; then - echo "buildconf: autoheader version $ah_version found." - echo " You need autoheader version 2.50 or newer installed." - echo " If you have a sufficient autoheader installed, but it" - echo " is not named 'autoheader', then try setting the" - echo " AUTOHEADER environment variable." - exit 1 -fi - -echo "buildconf: autoheader version $ah_version (ok)" - -#-------------------------------------------------------------------------- -# automake 1.7 or newer -# -need_automake="1.7" -am_version=`${AUTOMAKE:-automake} --version 2>/dev/null|head -n 1| sed -e 's/^.* \([0-9]\)/\1/' -e 's/[a-z]* *$//' -e 's/\(.*\)\(-p.*\)/\1/'` -if test -z "$am_version"; then - echo "buildconf: automake not found." - echo " You need automake version $need_automake or newer installed." - exit 1 -fi -old_IFS=$IFS; IFS='.'; set $am_version; IFS=$old_IFS -if test "$1" = "1" -a "$2" -lt "7" || test "$1" -lt "1"; then - echo "buildconf: automake version $am_version found." - echo " You need automake version $need_automake or newer installed." - echo " If you have a sufficient automake installed, but it" - echo " is not named 'automake', then try setting the" - echo " AUTOMAKE environment variable." - exit 1 -fi - -echo "buildconf: automake version $am_version (ok)" - -acloc_version=`${ACLOCAL:-aclocal} --version 2>/dev/null|head -n 1| sed -e 's/^.* \([0-9]\)/\1/' -e 's/[a-z]* *$//' -e 's/\(.*\)\(-p.*\)/\1/'` -if test -z "$acloc_version"; then - echo "buildconf: aclocal not found. Weird automake installation!" - exit 1 -fi -if test "$acloc_version" = "$am_version"; then - echo "buildconf: aclocal version $acloc_version (ok)" -else - echo "buildconf: aclocal version $acloc_version (ERROR: does not match automake version)" - exit 1 -fi - -#-------------------------------------------------------------------------- -# GNU libtoolize preliminary check -# -want_lt_major=1 -want_lt_minor=4 -want_lt_patch=2 -want_lt_version=1.4.2 - -# This approach that tries 'glibtoolize' first is intended for systems that -# have GNU libtool named as 'glibtoolize' and libtoolize not being GNU's. - -libtoolize=`findtool glibtoolize 2>/dev/null` -if test ! 
-x "$libtoolize"; then - libtoolize=`findtool ${LIBTOOLIZE:-libtoolize}` -fi -if test -z "$libtoolize"; then - echo "buildconf: libtoolize not found." - echo " You need GNU libtoolize $want_lt_version or newer installed." - exit 1 -fi - -lt_pver=`$libtoolize --version 2>/dev/null|head -n 1` -lt_qver=`echo $lt_pver|sed -e "s/([^)]*)//g" -e "s/^[^0-9]*//g"` -lt_version=`echo $lt_qver|sed -e "s/[- ].*//" -e "s/\([a-z]*\)$//"` -if test -z "$lt_version"; then - echo "buildconf: libtoolize not found." - echo " You need GNU libtoolize $want_lt_version or newer installed." - exit 1 -fi -old_IFS=$IFS; IFS='.'; set $lt_version; IFS=$old_IFS -lt_major=$1 -lt_minor=$2 -lt_patch=$3 - -if test -z "$lt_major"; then - lt_status="bad" -elif test "$lt_major" -gt "$want_lt_major"; then - lt_status="good" -elif test "$lt_major" -lt "$want_lt_major"; then - lt_status="bad" -elif test -z "$lt_minor"; then - lt_status="bad" -elif test "$lt_minor" -gt "$want_lt_minor"; then - lt_status="good" -elif test "$lt_minor" -lt "$want_lt_minor"; then - lt_status="bad" -elif test -z "$lt_patch"; then - lt_status="bad" -elif test "$lt_patch" -gt "$want_lt_patch"; then - lt_status="good" -elif test "$lt_patch" -lt "$want_lt_patch"; then - lt_status="bad" -else - lt_status="good" -fi -if test "$lt_status" != "good"; then - echo "buildconf: libtoolize version $lt_version found." - echo " You need GNU libtoolize $want_lt_version or newer installed." - exit 1 -fi - -echo "buildconf: libtoolize version $lt_version (ok)" - -#-------------------------------------------------------------------------- -# m4 check -# -m4=`(${M4:-m4} --version 0<&- || ${M4:-gm4} --version) 2>/dev/null 0<&- | head -n 1`; -m4_version=`echo $m4 | sed -e 's/^.* \([0-9]\)/\1/' -e 's/[a-z]* *$//'` - -if { echo $m4 | grep "GNU" >/dev/null 2>&1; } then - echo "buildconf: GNU m4 version $m4_version (ok)" -else - if test -z "$m4"; then - echo "buildconf: m4 version not recognized. You need a GNU m4 installed!" - else - echo "buildconf: m4 version $m4 found. You need a GNU m4 installed!" - fi - exit 1 -fi - -#-------------------------------------------------------------------------- -# perl check -# -PERL=`findtool ${PERL:-perl}` -if test -z "$PERL"; then - echo "buildconf: perl not found" - exit 1 -fi - -#-------------------------------------------------------------------------- -# Remove files generated on previous buildconf/configure run. -# -for fname in .deps \ - .libs \ - *.la \ - *.lo \ - *.a \ - *.o \ - Makefile \ - Makefile.in \ - aclocal.m4 \ - aclocal.m4.bak \ - ares_build.h \ - ares_config.h \ - ares_config.h.in \ - autom4te.cache \ - compile \ - config.guess \ - curl_config.h \ - curl_config.h.in \ - config.log \ - config.lt \ - config.status \ - config.sub \ - configure \ - configurehelp.pm \ - curl-config \ - depcomp \ - libcares.pc \ - libcurl.pc \ - libtool \ - libtool.m4 \ - libtool.m4.tmp \ - ltmain.sh \ - ltoptions.m4 \ - ltsugar.m4 \ - ltversion.m4 \ - lt~obsolete.m4 \ - missing \ - install-sh \ - stamp-h1 \ - stamp-h2 \ - stamp-h3 ; do - removethis "$fname" -done - -#-------------------------------------------------------------------------- -# run the correct scripts now -# - -echo "buildconf: running libtoolize" -${libtoolize} --copy --force || die "libtoolize command failed" - -# When using libtool 1.5.X (X < 26) we copy libtool.m4 to our local m4 -# subdirectory and this local copy is patched to fix some warnings that -# are triggered when running aclocal and using autoconf 2.62 or later. 
- -if test "$lt_major" = "1" && test "$lt_minor" = "5"; then - if test -z "$lt_patch" || test "$lt_patch" -lt "26"; then - echo "buildconf: copying libtool.m4 to local m4 subdir" - ac_dir=`${ACLOCAL:-aclocal} --print-ac-dir` - if test -f $ac_dir/libtool.m4; then - cp -f $ac_dir/libtool.m4 m4/libtool.m4 - else - echo "buildconf: $ac_dir/libtool.m4 not found" - fi - if test -f m4/libtool.m4; then - echo "buildconf: renaming some variables in local m4/libtool.m4" - $PERL -i.tmp -pe \ - 's/lt_prog_compiler_pic_works/lt_cv_prog_compiler_pic_works/g; \ - s/lt_prog_compiler_static_works/lt_cv_prog_compiler_static_works/g;' \ - m4/libtool.m4 - rm -f m4/libtool.m4.tmp - fi - fi -fi - -if test -f m4/libtool.m4; then - echo "buildconf: converting all mv to mv -f in local m4/libtool.m4" - $PERL -i.tmp -pe 's/\bmv +([^-\s])/mv -f $1/g' m4/libtool.m4 - rm -f m4/libtool.m4.tmp -fi - -echo "buildconf: running aclocal" -${ACLOCAL:-aclocal} -I m4 $ACLOCAL_FLAGS || die "aclocal command failed" - -echo "buildconf: converting all mv to mv -f in local aclocal.m4" -$PERL -i.bak -pe 's/\bmv +([^-\s])/mv -f $1/g' aclocal.m4 - -echo "buildconf: running autoheader" -${AUTOHEADER:-autoheader} || die "autoheader command failed" - -echo "buildconf: running autoconf" -${AUTOCONF:-autoconf} || die "autoconf command failed" - -if test -d ares; then - cd ares - echo "buildconf: running in ares" - ./buildconf - cd .. -fi - -echo "buildconf: running automake" -${AUTOMAKE:-automake} --add-missing --copy || die "automake command failed" - -#-------------------------------------------------------------------------- -# GNU libtool complementary check -# -# Depending on the libtool and automake versions being used, config.guess -# might not be installed in the subdirectory until automake has finished. -# So we can not attempt to use it until this very last buildconf stage. -# -if test ! -f ./config.guess; then - echo "buildconf: config.guess not found" -else - buildhost=`./config.guess 2>/dev/null|head -n 1` - case $buildhost in - *-*-darwin*) - need_lt_major=1 - need_lt_minor=5 - need_lt_patch=26 - need_lt_check="yes" - ;; - *-*-hpux*) - need_lt_major=1 - need_lt_minor=5 - need_lt_patch=24 - need_lt_check="yes" - ;; - esac - if test ! -z "$need_lt_check"; then - if test -z "$lt_major"; then - lt_status="bad" - elif test "$lt_major" -gt "$need_lt_major"; then - lt_status="good" - elif test "$lt_major" -lt "$need_lt_major"; then - lt_status="bad" - elif test -z "$lt_minor"; then - lt_status="bad" - elif test "$lt_minor" -gt "$need_lt_minor"; then - lt_status="good" - elif test "$lt_minor" -lt "$need_lt_minor"; then - lt_status="bad" - elif test -z "$lt_patch"; then - lt_status="bad" - elif test "$lt_patch" -gt "$need_lt_patch"; then - lt_status="good" - elif test "$lt_patch" -lt "$need_lt_patch"; then - lt_status="bad" - else - lt_status="good" - fi - if test "$lt_status" != "good"; then - need_lt_version="$need_lt_major.$need_lt_minor.$need_lt_patch" - echo "buildconf: libtool version $lt_version found." - echo " $buildhost requires GNU libtool $need_lt_version or newer installed." - rm -f configure - exit 1 - fi - fi -fi - -#-------------------------------------------------------------------------- -# Finished successfully. -# -echo "buildconf: OK" -exit 0 +echo "*** Do not use buildconf. 
Instead, just use: autoreconf -fi" >&2 +exec ${AUTORECONF:-autoreconf} -fi "${@}" diff --git a/buildconf.bat b/buildconf.bat index a6bded34b2467a..0435233151c8bc 100644 --- a/buildconf.bat +++ b/buildconf.bat @@ -6,7 +6,7 @@ rem * / __| | | | |_) | | rem * | (__| |_| | _ <| |___ rem * \___|\___/|_| \_\_____| rem * -rem * Copyright (C) 1998 - 2017, Daniel Stenberg, , et al. +rem * Copyright (C) 1998 - 2019, Daniel Stenberg, , et al. rem * rem * This software is licensed as described in the file COPYING, which rem * you should have received as part of this distribution. The terms @@ -212,7 +212,7 @@ rem copy /Y src\tool_hugehelp.c.cvs src\tool_hugehelp.c 1>NUL 2>&1 ) else ( echo #include "tool_setup.h"> src\tool_hugehelp.c - echo #include "tool_hugehelp.hd">> src\tool_hugehelp.c + echo #include "tool_hugehelp.h">> src\tool_hugehelp.c echo.>> src\tool_hugehelp.c echo void hugehelp(void^)>> src\tool_hugehelp.c echo {>> src\tool_hugehelp.c diff --git a/configure.ac b/configure.ac index 00fef948918cb9..2304775b12cd05 100755 --- a/configure.ac +++ b/configure.ac @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2019, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms @@ -31,7 +31,7 @@ XC_OVR_ZZ60 CURL_OVERRIDE_AUTOCONF dnl configure script copyright -AC_COPYRIGHT([Copyright (c) 1998 - 2019 Daniel Stenberg, +AC_COPYRIGHT([Copyright (c) 1998 - 2020 Daniel Stenberg, This configure script may be copied, distributed and modified under the terms of the curl license; see COPYING for more details]) @@ -49,6 +49,7 @@ CURL_CHECK_OPTION_CURLDEBUG CURL_CHECK_OPTION_SYMBOL_HIDING CURL_CHECK_OPTION_ARES CURL_CHECK_OPTION_RT +CURL_CHECK_OPTION_ECH XC_CHECK_PATH_SEPARATOR @@ -58,19 +59,6 @@ XC_CHECK_PATH_SEPARATOR CONFIGURE_OPTIONS="\"$ac_configure_args\"" AC_SUBST(CONFIGURE_OPTIONS) -CURL_CFLAG_EXTRAS="" -if test X"$want_werror" = Xyes; then - CURL_CFLAG_EXTRAS="-Werror" - if test "$compiler_id" = "GNU_C"; then - dnl enable -pedantic-errors for GCC 5 and later, - dnl as before that it was the same as -Werror=pedantic - if test "$compiler_num" -ge "500"; then - CURL_CFLAG_EXTRAS="$CURL_CFLAG_EXTRAS -pedantic-errors" - fi - fi -fi -AC_SUBST(CURL_CFLAG_EXTRAS) - dnl SED is mandatory for configure process and libtool. dnl Set it now, allowing it to be changed later. 
if test -z "$SED"; then @@ -155,10 +143,11 @@ AC_SUBST(PKGADD_VENDOR) dnl dnl initialize all the info variables - curl_ssl_msg="no (--with-{ssl,gnutls,nss,polarssl,mbedtls,cyassl,schannel,secure-transport,mesalink,amissl} )" - curl_ssh_msg="no (--with-libssh2)" + curl_ssl_msg="no (--with-{ssl,gnutls,nss,mbedtls,wolfssl,schannel,secure-transport,mesalink,amissl,bearssl} )" + curl_ssh_msg="no (--with-{libssh,libssh2})" curl_zlib_msg="no (--with-zlib)" curl_brotli_msg="no (--with-brotli)" + curl_zstd_msg="no (--with-zstd)" curl_gss_msg="no (--with-gssapi)" curl_tls_srp_msg="no (--enable-tls-srp)" curl_res_msg="default (--enable-ares / --enable-threaded-resolver)" @@ -285,6 +274,19 @@ if test "$compiler_id" = "INTEL_UNIX_C"; then # fi +CURL_CFLAG_EXTRAS="" +if test X"$want_werror" = Xyes; then + CURL_CFLAG_EXTRAS="-Werror" + if test "$compiler_id" = "GNU_C"; then + dnl enable -pedantic-errors for GCC 5 and later, + dnl as before that it was the same as -Werror=pedantic + if test "$compiler_num" -ge "500"; then + CURL_CFLAG_EXTRAS="$CURL_CFLAG_EXTRAS -pedantic-errors" + fi + fi +fi +AC_SUBST(CURL_CFLAG_EXTRAS) + CURL_CHECK_COMPILER_HALT_ON_ERROR CURL_CHECK_COMPILER_ARRAY_SIZE_NEGATIVE CURL_CHECK_COMPILER_PROTOTYPE_MISMATCH @@ -501,6 +503,7 @@ AC_HELP_STRING([--disable-proxy],[Disable proxy support]), AC_MSG_RESULT(no) AC_DEFINE(CURL_DISABLE_PROXY, 1, [to disable proxies]) AC_SUBST(CURL_DISABLE_PROXY, [1]) + https_proxy="no" ;; *) AC_MSG_RESULT(yes) ;; @@ -636,6 +639,21 @@ AC_HELP_STRING([--disable-gopher],[Disable Gopher support]), AC_MSG_RESULT(yes) ) +AC_MSG_CHECKING([whether to support mqtt]) +AC_ARG_ENABLE(mqtt, +AC_HELP_STRING([--enable-mqtt],[Enable MQTT support]) +AC_HELP_STRING([--disable-mqtt],[Disable MQTT support]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_MQTT, 1, [to disable MQTT]) + AC_SUBST(CURL_DISABLE_MQTT, [1]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(no) +) dnl ********************************************************************** dnl Check for built-in manual @@ -1039,7 +1057,7 @@ if test X"$OPT_BROTLI" != Xno; then LD_BROTLI=`$PKGCONFIG --libs-only-L libbrotlidec` CPP_BROTLI=`$PKGCONFIG --cflags-only-I libbrotlidec` version=`$PKGCONFIG --modversion libbrotlidec` - DIR_BROTLI=`echo $LD_BROTLI | $SED -e 's/-L//'` + DIR_BROTLI=`echo $LD_BROTLI | $SED -e 's/^-L//'` fi ;; @@ -1098,6 +1116,93 @@ if test X"$OPT_BROTLI" != Xno; then fi fi +dnl ********************************************************************** +dnl Check for libzstd +dnl ********************************************************************** + +dnl Default to compiler & linker defaults for libzstd +OPT_ZSTD=off +AC_ARG_WITH(zstd,dnl +AC_HELP_STRING([--with-zstd=PATH],[Where to look for libzstd, PATH points to the libzstd installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option]) +AC_HELP_STRING([--without-zstd], [disable libzstd]), + OPT_ZSTD=$withval) + +if test X"$OPT_ZSTD" != Xno; then + dnl backup the pre-zstd variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + case "$OPT_ZSTD" in + yes) + dnl --with-zstd (without path) used + CURL_CHECK_PKGCONFIG(libzstd) + + if test "$PKGCONFIG" != "no" ; then + LIB_ZSTD=`$PKGCONFIG --libs-only-l libzstd` + LD_ZSTD=`$PKGCONFIG --libs-only-L libzstd` + CPP_ZSTD=`$PKGCONFIG --cflags-only-I libzstd` + version=`$PKGCONFIG --modversion libzstd` + DIR_ZSTD=`echo $LD_ZSTD | $SED -e 's/-L//'` + fi + + ;; + off) + dnl no --with-zstd option given, 
just check default places + ;; + *) + dnl use the given --with-zstd spot + PREFIX_ZSTD=$OPT_ZSTD + ;; + esac + + dnl if given with a prefix, we set -L and -I based on that + if test -n "$PREFIX_ZSTD"; then + LIB_ZSTD="-lzstd" + LD_ZSTD=-L${PREFIX_ZSTD}/lib$libsuff + CPP_ZSTD=-I${PREFIX_ZSTD}/include + DIR_ZSTD=${PREFIX_ZSTD}/lib$libsuff + fi + + LDFLAGS="$LDFLAGS $LD_ZSTD" + CPPFLAGS="$CPPFLAGS $CPP_ZSTD" + LIBS="$LIB_ZSTD $LIBS" + + AC_CHECK_LIB(zstd, ZSTD_createDStream) + + AC_CHECK_HEADERS(zstd.h, + curl_zstd_msg="enabled (libzstd)" + HAVE_ZSTD=1 + AC_DEFINE(HAVE_ZSTD, 1, [if libzstd is in use]) + AC_SUBST(HAVE_ZSTD, [1]) + ) + + if test X"$OPT_ZSTD" != Xoff && + test "$HAVE_ZSTD" != "1"; then + AC_MSG_ERROR([libzstd was not found where specified!]) + fi + + if test "$HAVE_ZSTD" = "1"; then + if test -n "$DIR_ZSTD"; then + dnl when the zstd shared lib were found in a path that the run-time + dnl linker doesn't search through, we need to add it to + dnl CURL_LIBRARY_PATH to prevent further configure tests to fail due to + dnl this + + if test "x$cross_compiling" != "xyes"; then + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_ZSTD" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $DIR_ZSTD to CURL_LIBRARY_PATH]) + fi + fi + else + dnl no zstd, revert back to clean variables + LDFLAGS=$CLEANLDFLAGS + CPPFLAGS=$CLEANCPPFLAGS + LIBS=$CLEANLIBS + fi +fi + dnl ********************************************************************** dnl Check for LDAP dnl ********************************************************************** @@ -1202,16 +1307,23 @@ AC_HELP_STRING([--disable-ipv6],[Disable IPv6 support]), ;; esac ], - AC_TRY_RUN([ /* is AF_INET6 available? */ + AC_TRY_RUN([ /* are AF_INET6 and sockaddr_in6 available? */ #include #ifdef HAVE_WINSOCK2_H #include +#include #else #include +#include +#if defined (__TANDEM) +# include +#endif #endif #include /* for exit() */ main() { + struct sockaddr_in6 s; + (void)s; if (socket(AF_INET6, SOCK_STREAM, 0) < 0) exit(1); else @@ -1226,12 +1338,12 @@ main() ipv6=yes )) -if test "$ipv6" = "yes"; then +if test "$ipv6" = yes; then curl_ipv6_msg="enabled" -fi + AC_DEFINE(ENABLE_IPV6, 1, [Define if you want to enable IPv6 support]) + IPV6_ENABLED=1 + AC_SUBST(IPV6_ENABLED) -# Check if struct sockaddr_in6 have sin6_scope_id member -if test "$ipv6" = yes; then AC_MSG_CHECKING([if struct sockaddr_in6 has sin6_scope_id member]) AC_TRY_COMPILE([ #include @@ -1258,12 +1370,14 @@ dnl Check if the operating system allows programs to write to their own argv[] dnl ********************************************************************** AC_MSG_CHECKING([if argv can be written to]) -CURL_RUN_IFELSE([ -int main(int argc, char ** argv) { - argv[0][0] = ' '; - return (argv[0][0] == ' ')?0:1; +CURL_RUN_IFELSE([[ +int main(int argc, char **argv) +{ + (void)argc; + argv[0][0] = ' '; + return (argv[0][0] == ' ')?0:1; } -],[ +]],[ curl_cv_writable_argv=yes ],[ curl_cv_writable_argv=no @@ -1497,29 +1611,29 @@ dnl ------------------------------------------------- dnl check winssl option before other SSL libraries dnl ------------------------------------------------- -OPT_WINSSL=no +OPT_SCHANNEL=no AC_ARG_WITH(winssl,dnl AC_HELP_STRING([--with-winssl],[enable Windows native SSL/TLS]) AC_HELP_STRING([--without-winssl], [disable Windows native SSL/TLS]), - OPT_WINSSL=$withval) + OPT_SCHANNEL=$withval) AC_ARG_WITH(schannel,dnl AC_HELP_STRING([--with-schannel],[enable Windows native SSL/TLS]) AC_HELP_STRING([--without-schannel], [disable Windows native SSL/TLS]), - OPT_WINSSL=$withval) + 
OPT_SCHANNEL=$withval) AC_MSG_CHECKING([whether to enable Windows native SSL/TLS (Windows native builds only)]) -if test -z "$ssl_backends" -o "x$OPT_WINSSL" != xno; then +if test -z "$ssl_backends" -o "x$OPT_SCHANNEL" != xno; then ssl_msg= - if test "x$OPT_WINSSL" != "xno" && + if test "x$OPT_SCHANNEL" != "xno" && test "x$curl_cv_native_windows" = "xyes"; then AC_MSG_RESULT(yes) AC_DEFINE(USE_SCHANNEL, 1, [to enable Windows native SSL/TLS support]) AC_SUBST(USE_SCHANNEL, [1]) ssl_msg="Windows-native" test schannel != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes - WINSSL_ENABLED=1 - # --with-winssl implies --enable-sspi + SCHANNEL_ENABLED=1 + # --with-schannel implies --enable-sspi AC_DEFINE(USE_WINDOWS_SSPI, 1, [to enable SSPI support]) AC_SUBST(USE_WINDOWS_SSPI, [1]) curl_sspi_msg="enabled" @@ -1546,7 +1660,7 @@ AC_HELP_STRING([--without-secure-transport], [disable Apple OS native SSL/TLS]), AC_MSG_CHECKING([whether to enable Secure Transport]) if test -z "$ssl_backends" -o "x$OPT_SECURETRANSPORT" != xno; then if test "x$OPT_SECURETRANSPORT" != "xno" && - test -d "/System/Library/Frameworks/Security.framework"; then + (test "x$cross_compiling" != "xno" || test -d "/System/Library/Frameworks/Security.framework"); then AC_MSG_RESULT(yes) AC_DEFINE(USE_SECTRANSP, 1, [enable Secure Transport]) AC_SUBST(USE_SECTRANSP, [1]) @@ -1695,7 +1809,7 @@ if test -z "$ssl_backends" -o "x$OPT_SSL" != xno && AC_MSG_NOTICE([pkg-config: SSL_LDFLAGS: "$SSL_LDFLAGS"]) AC_MSG_NOTICE([pkg-config: SSL_CPPFLAGS: "$SSL_CPPFLAGS"]) - LIB_OPENSSL=`echo $SSL_LDFLAGS | sed -e 's/-L//g'` + LIB_OPENSSL=`echo $SSL_LDFLAGS | sed -e 's/^-L//'` dnl use the values pkg-config reported. This is here dnl instead of below with CPPFLAGS and LDFLAGS because we only @@ -1720,6 +1834,8 @@ if test -z "$ssl_backends" -o "x$OPT_SSL" != xno && # only set this if pkg-config wasn't used CPPFLAGS="$CLEANCPPFLAGS -I$PREFIX_OPENSSL/include/openssl -I$PREFIX_OPENSSL/include" fi + # Linking previously failed, try extra paths from --with-ssl or pkg-config. + # Use a different function name to avoid reusing the earlier cached result. AC_CHECK_LIB(crypto, HMAC_Init_ex,[ HAVECRYPTO="yes" LIBS="-lcrypto $LIBS"], [ @@ -1741,6 +1857,7 @@ if test -z "$ssl_backends" -o "x$OPT_SSL" != xno && [ AC_MSG_RESULT(no) dnl ok, so what about both -ldl and -lpthread? + dnl This may be necessary for static libraries. 
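# Illustrative aside, not part of the patch: these fallback link attempts matter
# mostly when OpenSSL was built as static libraries, where libcrypto needs -ldl
# and -lpthread pulled in explicitly. A build against such an installation
# (hypothetical prefix /opt/openssl-static) would normally just pass the prefix
# and let the checks here discover the extra libraries:
./configure --with-ssl=/opt/openssl-static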
AC_MSG_CHECKING([OpenSSL linking with -ldl and -lpthread]) LIBS="$CLEANLIBS -lcrypto -ldl -lpthread" @@ -1826,13 +1943,9 @@ if test -z "$ssl_backends" -o "x$OPT_SSL" != xno && if test X"$OPENSSL_ENABLED" = X"1"; then dnl These can only exist if OpenSSL exists - dnl Older versions of Cyassl (some time before 2.9.4) don't have - dnl SSL_get_shutdown (but this check won't actually detect it there - dnl as it's a macro that needs the header files be included) dnl OpenSSL_version is introduced in 3.0.0 AC_CHECK_FUNCS( RAND_egd \ - SSL_get_shutdown \ SSLv2_client_method \ OpenSSL_version ) @@ -1869,6 +1982,31 @@ if test -z "$ssl_backends" -o "x$OPT_SSL" != xno && ],[ AC_MSG_RESULT([no]) ]) + + AC_MSG_CHECKING([for OpenSSL >= v3]) + AC_COMPILE_IFELSE([ + AC_LANG_PROGRAM([[ +#include + ]],[[ + #if defined(OPENSSL_VERSION_MAJOR) && (OPENSSL_VERSION_MAJOR >= 3) + return 0; + #else + #error older than 3 + #endif + ]]) + ],[ + AC_MSG_RESULT([yes]) + AC_DEFINE_UNQUOTED(HAVE_OPENSSL3, 1, + [Define to 1 if using OpenSSL 3 or later.]) + dnl OpenSSLv3 marks the DES functions deprecated but we have no + dnl replacements (yet) so tell the compiler to not warn for them + dnl + dnl Ask OpenSSL to suppress the warnings. + CPPFLAGS="$CPPFLAGS -DOPENSSL_SUPPRESS_DEPRECATED" + ssl_msg="OpenSSL v3+" + ],[ + AC_MSG_RESULT([no]) + ]) fi if test "$OPENSSL_ENABLED" = "1"; then @@ -1889,6 +2027,14 @@ if test -z "$ssl_backends" -o "x$OPT_SSL" != xno && test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg" fi +if test X"$OPT_SSL" != Xoff && + test X"$OPT_SSL" != Xno && + test "$OPENSSL_ENABLED" != "1"; then + AC_MSG_NOTICE([OPT_SSL: $OPT_SSL]) + AC_MSG_NOTICE([OPENSSL_ENABLED: $OPENSSL_ENABLED]) + AC_MSG_ERROR([--with-ssl was given but OpenSSL could not be detected]) +fi + dnl ********************************************************************** dnl Check for the random seed preferences dnl ********************************************************************** @@ -1982,7 +2128,7 @@ if test -z "$ssl_backends" -o "x$OPT_GNUTLS" != xno; then addld=`$PKGCONFIG --libs-only-L gnutls` addcflags=`$PKGCONFIG --cflags-only-I gnutls` version=`$PKGCONFIG --modversion gnutls` - gtlslib=`echo $addld | $SED -e 's/-L//'` + gtlslib=`echo $addld | $SED -e 's/^-L//'` else dnl without pkg-config, we try libgnutls-config as that was how it dnl used to be done @@ -2031,7 +2177,8 @@ if test -z "$ssl_backends" -o "x$OPT_GNUTLS" != xno; then CPPFLAGS="$CPPFLAGS $addcflags" fi - AC_CHECK_LIB(gnutls, gnutls_check_version, + dnl this function is selected since it was introduced in 3.1.10 + AC_CHECK_LIB(gnutls, gnutls_x509_crt_get_dn2, [ AC_DEFINE(USE_GNUTLS, 1, [if GnuTLS is enabled]) AC_SUBST(USE_GNUTLS, [1]) @@ -2059,7 +2206,6 @@ if test -z "$ssl_backends" -o "x$OPT_GNUTLS" != xno; then AC_MSG_NOTICE([Added $gtlslib to CURL_LIBRARY_PATH]) fi fi - AC_CHECK_FUNCS([gnutls_certificate_set_x509_key_file2 gnutls_alpn_set_protocols gnutls_ocsp_req_init]) fi fi @@ -2110,98 +2256,6 @@ if test "$GNUTLS_ENABLED" = "1"; then ]) fi -dnl ---------------------------------------------------- -dnl check for PolarSSL -dnl ---------------------------------------------------- - -dnl Default to compiler & linker defaults for PolarSSL files & libraries. 
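# Illustrative aside, not part of the patch: the GnuTLS probe above now keys on
# gnutls_x509_crt_get_dn2, introduced in GnuTLS 3.1.10, so roughly that version
# or newer is what detection expects, and an OpenSSL reporting 3.x takes the new
# HAVE_OPENSSL3 path above. The module versions configure will see can be
# checked up front with pkg-config, assuming .pc files are installed:
pkg-config --modversion gnutls
pkg-config --modversion openssl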
-OPT_POLARSSL=no - -_cppflags=$CPPFLAGS -_ldflags=$LDFLAGS -AC_ARG_WITH(polarssl,dnl -AC_HELP_STRING([--with-polarssl=PATH],[where to look for PolarSSL, PATH points to the installation root]) -AC_HELP_STRING([--without-polarssl], [disable PolarSSL detection]), - OPT_POLARSSL=$withval) - -if test -z "$ssl_backends" -o "x$OPT_POLARSSL" != xno; then - ssl_msg= - - if test X"$OPT_POLARSSL" != Xno; then - - if test "$OPT_POLARSSL" = "yes"; then - OPT_POLARSSL="" - fi - - if test -z "$OPT_POLARSSL" ; then - dnl check for lib first without setting any new path - - AC_CHECK_LIB(polarssl, havege_init, - dnl libpolarssl found, set the variable - [ - AC_DEFINE(USE_POLARSSL, 1, [if PolarSSL is enabled]) - AC_SUBST(USE_POLARSSL, [1]) - POLARSSL_ENABLED=1 - USE_POLARSSL="yes" - ssl_msg="PolarSSL" - test polarssl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes - ]) - fi - - addld="" - addlib="" - addcflags="" - polarssllib="" - - if test "x$USE_POLARSSL" != "xyes"; then - dnl add the path and test again - addld=-L$OPT_POLARSSL/lib$libsuff - addcflags=-I$OPT_POLARSSL/include - polarssllib=$OPT_POLARSSL/lib$libsuff - - LDFLAGS="$LDFLAGS $addld" - if test "$addcflags" != "-I/usr/include"; then - CPPFLAGS="$CPPFLAGS $addcflags" - fi - - AC_CHECK_LIB(polarssl, ssl_init, - [ - AC_DEFINE(USE_POLARSSL, 1, [if PolarSSL is enabled]) - AC_SUBST(USE_POLARSSL, [1]) - POLARSSL_ENABLED=1 - USE_POLARSSL="yes" - ssl_msg="PolarSSL" - test polarssl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes - ], - [ - CPPFLAGS=$_cppflags - LDFLAGS=$_ldflags - ]) - fi - - if test "x$USE_POLARSSL" = "xyes"; then - AC_MSG_NOTICE([detected PolarSSL]) - check_for_ca_bundle=1 - LIBS="-lpolarssl $LIBS" - - if test -n "$polarssllib"; then - dnl when shared libs were found in a path that the run-time - dnl linker doesn't search through, we need to add it to - dnl CURL_LIBRARY_PATH to prevent further configure tests to fail - dnl due to this - if test "x$cross_compiling" != "xyes"; then - CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$polarssllib" - export CURL_LIBRARY_PATH - AC_MSG_NOTICE([Added $polarssllib to CURL_LIBRARY_PATH]) - fi - fi - fi - - fi dnl PolarSSL not disabled - - test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg" -fi - dnl ---------------------------------------------------- dnl check for mbedTLS dnl ---------------------------------------------------- @@ -2295,175 +2349,162 @@ if test -z "$ssl_backends" -o "x$OPT_MBEDTLS" != xno; then fi dnl ---------------------------------------------------- -dnl check for CyaSSL +dnl check for wolfSSL dnl ---------------------------------------------------- -dnl Default to compiler & linker defaults for CyaSSL files & libraries. -OPT_CYASSL=no +dnl Default to compiler & linker defaults for wolfSSL files & libraries. 
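# Illustrative aside, not part of the patch: with the pkg-config based detection
# added here, a wolfSSL build can be selected either via a wolfssl.pc file on
# the default search path or by pointing --with-wolfssl at an install prefix
# (the /opt/wolfssl prefix is a hypothetical example):
./configure --with-wolfssl
./configure --with-wolfssl=/opt/wolfssl   # consults /opt/wolfssl/lib/pkgconfig first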
+OPT_WOLFSSL=no _cppflags=$CPPFLAGS _ldflags=$LDFLAGS -AC_ARG_WITH(cyassl,dnl -AC_HELP_STRING([--with-cyassl=PATH],[where to look for CyaSSL, PATH points to the installation root (default: system lib default)]) -AC_HELP_STRING([--without-cyassl], [disable CyaSSL detection]), - OPT_CYASSL=$withval) -dnl provide --with-wolfssl as an alias for --with-cyassl AC_ARG_WITH(wolfssl,dnl AC_HELP_STRING([--with-wolfssl=PATH],[where to look for WolfSSL, PATH points to the installation root (default: system lib default)]) AC_HELP_STRING([--without-wolfssl], [disable WolfSSL detection]), - OPT_CYASSL=$withval) + OPT_WOLFSSL=$withval) -if test -z "$ssl_backends" -o "x$OPT_CYASSL" != xno; then +case "$OPT_WOLFSSL" in + yes|no) + wolfpkg="" + ;; + *) + wolfpkg="$withval/lib/pkgconfig" + ;; +esac + +if test -z "$ssl_backends" -o "x$OPT_WOLFSSL" != xno; then ssl_msg= - if test X"$OPT_CYASSL" != Xno; then + if test X"$OPT_WOLFSSL" != Xno; then - if test "$OPT_CYASSL" = "yes"; then - OPT_CYASSL="" + if test "$OPT_WOLFSSL" = "yes"; then + OPT_WOLFSSL="" fi - dnl This should be reworked to use pkg-config instead - - cyassllibname=cyassl - - if test -z "$OPT_CYASSL" ; then - dnl check for lib in system default first - - AC_CHECK_LIB(cyassl, CyaSSL_Init, - dnl libcyassl found, set the variable - [ - AC_DEFINE(USE_CYASSL, 1, [if CyaSSL is enabled]) - AC_SUBST(USE_CYASSL, [1]) - CYASSL_ENABLED=1 - USE_CYASSL="yes" - ssl_msg="CyaSSL" - test cyassl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes - ]) - fi + dnl try pkg-config magic + CURL_CHECK_PKGCONFIG(wolfssl, [$wolfpkg]) + AC_MSG_NOTICE([Check dir $wolfpkg]) addld="" addlib="" addcflags="" - cyassllib="" - - if test "x$USE_CYASSL" != "xyes"; then - dnl add the path and test again - addld=-L$OPT_CYASSL/lib$libsuff - addcflags=-I$OPT_CYASSL/include - cyassllib=$OPT_CYASSL/lib$libsuff - - LDFLAGS="$LDFLAGS $addld" - if test "$addcflags" != "-I/usr/include"; then - CPPFLAGS="$CPPFLAGS $addcflags" + if test "$PKGCONFIG" != "no" ; then + addlib=`CURL_EXPORT_PCDIR([$wolfpkg]) + $PKGCONFIG --libs-only-l wolfssl` + addld=`CURL_EXPORT_PCDIR([$wolfpkg]) + $PKGCONFIG --libs-only-L wolfssl` + addcflags=`CURL_EXPORT_PCDIR([$wolfpkg]) + $PKGCONFIG --cflags-only-I wolfssl` + version=`CURL_EXPORT_PCDIR([$wolfpkg]) + $PKGCONFIG --modversion wolfssl` + wolfssllibpath=`echo $addld | $SED -e 's/^-L//'` + else + addlib=-lwolfssl + dnl use system defaults if user does not supply a path + if test -n "$OPT_WOLFSSL"; then + addld=-L$OPT_WOLFSSL/lib$libsuff + addcflags=-I$OPT_WOLFSSL/include + wolfssllibpath=$OPT_WOLFSSL/lib$libsuff fi - - AC_CHECK_LIB(cyassl, CyaSSL_Init, - [ - AC_DEFINE(USE_CYASSL, 1, [if CyaSSL is enabled]) - AC_SUBST(USE_CYASSL, [1]) - CYASSL_ENABLED=1 - USE_CYASSL="yes" - ssl_msg="CyaSSL" - test cyassl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes - ], - [ - CPPFLAGS=$_cppflags - LDFLAGS=$_ldflags - cyassllib="" - ]) fi - addld="" - addlib="" - addcflags="" - - if test "x$USE_CYASSL" != "xyes"; then - dnl libcyassl renamed to libwolfssl as of 3.4.0 - addld=-L$OPT_CYASSL/lib$libsuff - addcflags=-I$OPT_CYASSL/include - cyassllib=$OPT_CYASSL/lib$libsuff + if test "x$USE_WOLFSSL" != "xyes"; then LDFLAGS="$LDFLAGS $addld" + AC_MSG_NOTICE([Add $addld to LDFLAGS]) if test "$addcflags" != "-I/usr/include"; then CPPFLAGS="$CPPFLAGS $addcflags" + AC_MSG_NOTICE([Add $addcflags to CPPFLAGS]) fi - cyassllibname=wolfssl my_ac_save_LIBS="$LIBS" - LIBS="-l$cyassllibname -lm $LIBS" + LIBS="$addlib $LIBS" + AC_MSG_NOTICE([Add $addlib to LIBS]) - 
AC_MSG_CHECKING([for CyaSSL_Init in -lwolfssl]) + AC_MSG_CHECKING([for wolfSSL_Init in -lwolfssl]) AC_LINK_IFELSE([ AC_LANG_PROGRAM([[ /* These aren't needed for detection and confuse WolfSSL. They are set up properly later if it is detected. */ #undef SIZEOF_LONG #undef SIZEOF_LONG_LONG -#include +#include ]],[[ - return CyaSSL_Init(); + return wolfSSL_Init(); ]]) ],[ AC_MSG_RESULT(yes) - AC_DEFINE(USE_CYASSL, 1, [if CyaSSL/WolfSSL is enabled]) - AC_SUBST(USE_CYASSL, [1]) - CYASSL_ENABLED=1 - USE_CYASSL="yes" + AC_DEFINE(USE_WOLFSSL, 1, [if wolfSSL is enabled]) + AC_SUBST(USE_WOLFSSL, [1]) + WOLFSSL_ENABLED=1 + USE_WOLFSSL="yes" ssl_msg="WolfSSL" - test cyassl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes + test wolfssl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes ], [ AC_MSG_RESULT(no) CPPFLAGS=$_cppflags LDFLAGS=$_ldflags - cyassllib="" + wolfssllibpath="" ]) LIBS="$my_ac_save_LIBS" fi - if test "x$USE_CYASSL" = "xyes"; then - AC_MSG_NOTICE([detected $cyassllibname]) + if test "x$USE_WOLFSSL" = "xyes"; then + AC_MSG_NOTICE([detected wolfSSL]) check_for_ca_bundle=1 - dnl cyassl/ctaocrypt/types.h needs SIZEOF_LONG_LONG defined! + dnl wolfssl/ctaocrypt/types.h needs SIZEOF_LONG_LONG defined! AX_COMPILE_CHECK_SIZEOF(long long) - dnl Versions since at least 2.6.0 may have options.h - AC_CHECK_HEADERS(cyassl/options.h) - - dnl Versions since at least 2.9.4 renamed error.h to error-ssl.h - AC_CHECK_HEADERS(cyassl/error-ssl.h) + LIBS="$addlib -lm $LIBS" - LIBS="-l$cyassllibname -lm $LIBS" + dnl Recent WolfSSL versions build without SSLv3 by default + dnl WolfSSL needs configure --enable-opensslextra to have *get_peer* + dnl DES* is needed for NTLM support and lives in the OpenSSL compatibility + dnl layer + AC_CHECK_FUNCS(wolfSSLv3_client_method \ + wolfSSL_get_peer_certificate \ + wolfSSL_UseALPN ) - if test "x$cyassllibname" = "xwolfssl"; then - dnl Recent WolfSSL versions build without SSLv3 by default - dnl WolfSSL needs configure --enable-opensslextra to have *get_peer* - AC_CHECK_FUNCS(wolfSSLv3_client_method \ - wolfSSL_get_peer_certificate \ - wolfSSL_UseALPN) - else - dnl Cyassl needs configure --enable-opensslextra to have *get_peer* - AC_CHECK_FUNCS(CyaSSL_CTX_UseSupportedCurve \ - CyaSSL_get_peer_certificate) - fi - - if test -n "$cyassllib"; then + dnl if this symbol is present, we want the include path to include the + dnl OpenSSL API root as well + AC_CHECK_FUNC(wolfSSL_DES_ecb_encrypt, + [ + AC_DEFINE(HAVE_WOLFSSL_DES_ECB_ENCRYPT, 1, + [if you have wolfSSL_DES_ecb_encrypt]) + if test -n "$addcflags"; then + dnl use a for loop to strip off whitespace + for f in $addcflags; do + CPPFLAGS="$f/wolfssl $CPPFLAGS" + AC_MSG_NOTICE([Add $f/wolfssl to CPPFLAGS]) + break + done + else + dnl user didn't give a path, so guess/hope they installed wolfssl + dnl headers to system default location + CPPFLAGS="-I$includedir/wolfssl $CPPFLAGS" + AC_MSG_NOTICE([Add $includedir/wolfssl to CPPFLAGS]) + fi + WOLFSSL_NTLM=1 + ] + ) + + if test -n "$wolfssllibpath"; then dnl when shared libs were found in a path that the run-time dnl linker doesn't search through, we need to add it to dnl CURL_LIBRARY_PATH to prevent further configure tests to fail dnl due to this if test "x$cross_compiling" != "xyes"; then - CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$cyassllib" + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$wolfssllibpath" export CURL_LIBRARY_PATH - AC_MSG_NOTICE([Added $cyassllib to CURL_LIBRARY_PATH]) + AC_MSG_NOTICE([Added $wolfssllibpath to CURL_LIBRARY_PATH]) fi fi fi - fi dnl 
CyaSSL not disabled + fi dnl wolfSSL not disabled test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg" fi @@ -2560,6 +2601,98 @@ if test -z "$ssl_backends" -o "x$OPT_MESALINK" != xno; then test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg" fi +dnl ---------------------------------------------------- +dnl check for BearSSL +dnl ---------------------------------------------------- + +OPT_BEARSSL=no + +_cppflags=$CPPFLAGS +_ldflags=$LDFLAGS +AC_ARG_WITH(bearssl,dnl +AC_HELP_STRING([--with-bearssl=PATH],[where to look for BearSSL, PATH points to the installation root]) +AC_HELP_STRING([--without-bearssl], [disable BearSSL detection]), + OPT_BEARSSL=$withval) + +if test -z "$ssl_backends" -o "x$OPT_BEARSSL" != xno; then + ssl_msg= + + if test X"$OPT_BEARSSL" != Xno; then + + if test "$OPT_BEARSSL" = "yes"; then + OPT_BEARSSL="" + fi + + if test -z "$OPT_BEARSSL" ; then + dnl check for lib first without setting any new path + + AC_CHECK_LIB(bearssl, br_ssl_client_init_full, + dnl libbearssl found, set the variable + [ + AC_DEFINE(USE_BEARSSL, 1, [if BearSSL is enabled]) + AC_SUBST(USE_BEARSSL, [1]) + BEARSSL_ENABLED=1 + USE_BEARSSL="yes" + ssl_msg="BearSSL" + test bearssl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes + ], [], -lbearssl) + fi + + addld="" + addlib="" + addcflags="" + bearssllib="" + + if test "x$USE_BEARSSL" != "xyes"; then + dnl add the path and test again + addld=-L$OPT_BEARSSL/lib$libsuff + addcflags=-I$OPT_BEARSSL/include + bearssllib=$OPT_BEARSSL/lib$libsuff + + LDFLAGS="$LDFLAGS $addld" + if test "$addcflags" != "-I/usr/include"; then + CPPFLAGS="$CPPFLAGS $addcflags" + fi + + AC_CHECK_LIB(bearssl, br_ssl_client_init_full, + [ + AC_DEFINE(USE_BEARSSL, 1, [if BearSSL is enabled]) + AC_SUBST(USE_BEARSSL, [1]) + BEARSSL_ENABLED=1 + USE_BEARSSL="yes" + ssl_msg="BearSSL" + test bearssl != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes + ], + [ + CPPFLAGS=$_cppflags + LDFLAGS=$_ldflags + ], -lbearssl) + fi + + if test "x$USE_BEARSSL" = "xyes"; then + AC_MSG_NOTICE([detected BearSSL]) + check_for_ca_bundle=1 + + LIBS="-lbearssl $LIBS" + + if test -n "$bearssllib"; then + dnl when shared libs were found in a path that the run-time + dnl linker doesn't search through, we need to add it to + dnl CURL_LIBRARY_PATH to prevent further configure tests to fail + dnl due to this + if test "x$cross_compiling" != "xyes"; then + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$bearssllib" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $bearssllib to CURL_LIBRARY_PATH]) + fi + fi + fi + + fi dnl BearSSL not disabled + + test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg" +fi + dnl ---------------------------------------------------- dnl NSS. 
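# Illustrative aside, not part of the patch: the new BearSSL block first checks
# the default library path and then the prefix given to --with-bearssl, so a
# non-standard install (hypothetical prefix /opt/bearssl) would be picked up with:
./configure --with-bearssl=/opt/bearssl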
Only check if GnuTLS and OpenSSL are not enabled dnl ---------------------------------------------------- @@ -2690,10 +2823,10 @@ if test -z "$ssl_backends" -o "x$OPT_NSS" != xno; then test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg" fi -case "x$OPENSSL_ENABLED$GNUTLS_ENABLED$NSS_ENABLED$POLARSSL_ENABLED$MBEDTLS_ENABLED$CYASSL_ENABLED$WINSSL_ENABLED$SECURETRANSPORT_ENABLED$MESALINK_ENABLED$AMISSL_ENABLED" in +case "x$OPENSSL_ENABLED$GNUTLS_ENABLED$NSS_ENABLED$MBEDTLS_ENABLED$WOLFSSL_ENABLED$SCHANNEL_ENABLED$SECURETRANSPORT_ENABLED$MESALINK_ENABLED$BEARSSL_ENABLED$AMISSL_ENABLED" in x) AC_MSG_WARN([SSL disabled, you will not be able to use HTTPS, FTPS, NTLM and more.]) - AC_MSG_WARN([Use --with-ssl, --with-gnutls, --with-polarssl, --with-cyassl, --with-nss, --with-schannel, --with-secure-transport, --with-mesalink or --with-amissl to address this.]) + AC_MSG_WARN([Use --with-ssl, --with-gnutls, --with-wolfssl, --with-mbedtls, --with-nss, --with-schannel, --with-secure-transport, --with-mesalink, --with-amissl or --with-bearssl to address this.]) ;; x1) # one SSL backend is enabled @@ -2733,7 +2866,7 @@ dnl ********************************************************************** dnl Check for the CA bundle dnl ********************************************************************** -if test "$check_for_ca_bundle" -gt 0; then +if test -n "$check_for_ca_bundle"; then CURL_CHECK_CA_BUNDLE fi @@ -2802,7 +2935,7 @@ if test X"$OPT_LIBMETALINK" != Xno; then $PKGCONFIG --cflags-only-I libmetalink` version=`CURL_EXPORT_PCDIR([$LIBMETALINK_PCDIR]) dnl $PKGCONFIG --modversion libmetalink` - libmetalinklib=`echo $addld | $SED -e 's/-L//'` + libmetalinklib=`echo $addld | $SED -e 's/^-L//'` fi if test -n "$addlib"; then @@ -2828,6 +2961,12 @@ if test X"$OPT_LIBMETALINK" != Xno; then AC_MSG_NOTICE([libmetalink library defective or too old]) want_metalink="no" ]) + if test "x$OPENSSL_ENABLED" != "x1" -a "x$USE_WINDOWS_SSPI" != "x1" \ + -a "x$GNUTLS_ENABLED" != "x1" -a "x$NSS_ENABLED" != "x1" \ + -a "x$SECURETRANSPORT_ENABLED" != "x1"; then + AC_MSG_WARN([metalink support requires a compatible SSL/TLS backend]) + want_metalink="no" + fi CPPFLAGS="$clean_CPPFLAGS" LDFLAGS="$clean_LDFLAGS" LIBS="$clean_LIBS" @@ -2853,17 +2992,23 @@ dnl ********************************************************************** dnl Default to compiler & linker defaults for LIBSSH2 files & libraries. 
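# Illustrative aside, not part of the patch: SCP/SFTP support comes from exactly
# one of the SSH backends handled below (an if/elif chain), so the options are
# alternatives rather than additive; pkg-config resolves the library details for
# libssh2 and libssh:
./configure --with-libssh2
./configure --with-libssh
# and the patch adds wolfSSH as a further choice (hypothetical prefix shown,
# a path requires a usable bin/wolfssh-config there):
./configure --with-wolfssh=/opt/wolfssh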
OPT_LIBSSH2=off AC_ARG_WITH(libssh2,dnl -AC_HELP_STRING([--with-libssh2=PATH],[Where to look for libssh2, PATH points to the LIBSSH2 installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option]) -AC_HELP_STRING([--with-libssh2], [enable LIBSSH2]), +AC_HELP_STRING([--with-libssh2=PATH],[Where to look for libssh2, PATH points to the libssh2 installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option]) +AC_HELP_STRING([--with-libssh2], [enable libssh2]), OPT_LIBSSH2=$withval, OPT_LIBSSH2=no) OPT_LIBSSH=off AC_ARG_WITH(libssh,dnl -AC_HELP_STRING([--with-libssh=PATH],[Where to look for libssh, PATH points to the LIBSSH installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option]) -AC_HELP_STRING([--with-libssh], [enable LIBSSH]), +AC_HELP_STRING([--with-libssh=PATH],[Where to look for libssh, PATH points to the libssh installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option]) +AC_HELP_STRING([--with-libssh], [enable libssh]), OPT_LIBSSH=$withval, OPT_LIBSSH=no) +OPT_WOLFSSH=off +AC_ARG_WITH(wolfssh,dnl +AC_HELP_STRING([--with-wolfssh=PATH],[Where to look for wolfssh, PATH points to the wolfSSH installation; when possible, set the PKG_CONFIG_PATH environment variable instead of using this option]) +AC_HELP_STRING([--with-wolfssh], [enable wolfssh]), + OPT_WOLFSSH=$withval, OPT_WOLFSSH=no) + if test X"$OPT_LIBSSH2" != Xno; then dnl backup the pre-libssh2 variables CLEANLDFLAGS="$LDFLAGS" @@ -2880,7 +3025,7 @@ if test X"$OPT_LIBSSH2" != Xno; then LD_SSH2=`$PKGCONFIG --libs-only-L libssh2` CPP_SSH2=`$PKGCONFIG --cflags-only-I libssh2` version=`$PKGCONFIG --modversion libssh2` - DIR_SSH2=`echo $LD_SSH2 | $SED -e 's/-L//'` + DIR_SSH2=`echo $LD_SSH2 | $SED -e 's/^-L//'` fi ;; @@ -2953,7 +3098,7 @@ elif test X"$OPT_LIBSSH" != Xno; then LD_SSH=`$PKGCONFIG --libs-only-L libssh` CPP_SSH=`$PKGCONFIG --cflags-only-I libssh` version=`$PKGCONFIG --modversion libssh` - DIR_SSH=`echo $LD_SSH | $SED -e 's/-L//'` + DIR_SSH=`echo $LD_SSH | $SED -e 's/^-L//'` fi ;; @@ -3010,6 +3155,28 @@ elif test X"$OPT_LIBSSH" != Xno; then CPPFLAGS=$CLEANCPPFLAGS LIBS=$CLEANLIBS fi +elif test X"$OPT_WOLFSSH" != Xno; then + dnl backup the pre-wolfssh variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + + if test "$OPT_WOLFSSH" != yes; then + WOLFCONFIG="$OPT_WOLFSSH/bin/wolfssh-config" + LDFLAGS="$LDFLAGS `$WOLFCONFIG --libs`" + CPPFLAGS="$CPPFLAGS `$WOLFCONFIG --cflags`" + fi + + AC_CHECK_LIB(wolfssh, wolfSSH_Init) + + AC_CHECK_HEADERS(wolfssh/ssh.h, + curl_ssh_msg="enabled (wolfSSH)" + WOLFSSH_ENABLED=1 + AC_DEFINE(USE_WOLFSSH, 1, [if wolfSSH is in use]) + AC_SUBST(USE_WOLFSSH, [1]) + ) + fi dnl ********************************************************************** @@ -3039,7 +3206,7 @@ if test X"$OPT_LIBRTMP" != Xno; then LD_RTMP=`$PKGCONFIG --libs-only-L librtmp` CPP_RTMP=`$PKGCONFIG --cflags-only-I librtmp` version=`$PKGCONFIG --modversion librtmp` - DIR_RTMP=`echo $LD_RTMP | $SED -e 's/-L//'` + DIR_RTMP=`echo $LD_RTMP | $SED -e 's/^-L//'` else dnl To avoid link errors, we do not allow --librtmp without dnl a pkgconfig file @@ -3117,11 +3284,9 @@ AC_HELP_STRING([--disable-versioned-symbols], [Disable versioned symbols in shar versioned_symbols_flavour="GNUTLS_" elif test "x$NSS_ENABLED" = "x1"; then versioned_symbols_flavour="NSS_" - elif test "x$POLARSSL_ENABLED" = "x1"; then - versioned_symbols_flavour="POLARSSL_" - 
elif test "x$CYASSL_ENABLED" = "x1"; then - versioned_symbols_flavour="CYASSL_" - elif test "x$WINSSL_ENABLED" = "x1"; then + elif test "x$WOLFSSL_ENABLED" = "x1"; then + versioned_symbols_flavour="WOLFSSL_" + elif test "x$SCHANNEL_ENABLED" = "x1"; then versioned_symbols_flavour="SCHANNEL_" elif test "x$SECURETRANSPORT_ENABLED" = "x1"; then versioned_symbols_flavour="SECURE_TRANSPORT_" @@ -3277,7 +3442,7 @@ if test "$want_idn" = "yes"; then $PKGCONFIG --libs-only-L libidn2 2>/dev/null` IDN_CPPFLAGS=`CURL_EXPORT_PCDIR([$IDN_PCDIR]) dnl $PKGCONFIG --cflags-only-I libidn2 2>/dev/null` - IDN_DIR=`echo $IDN_LDFLAGS | $SED -e 's/-L//'` + IDN_DIR=`echo $IDN_LDFLAGS | $SED -e 's/^-L//'` else dnl pkg-config not available or provides no info IDN_LIBS="-lidn2" @@ -3292,7 +3457,7 @@ if test "$want_idn" = "yes"; then IDN_LIBS=`$PKGCONFIG --libs-only-l libidn2 2>/dev/null` IDN_LDFLAGS=`$PKGCONFIG --libs-only-L libidn2 2>/dev/null` IDN_CPPFLAGS=`$PKGCONFIG --cflags-only-I libidn2 2>/dev/null` - IDN_DIR=`echo $IDN_LDFLAGS | $SED -e 's/-L//'` + IDN_DIR=`echo $IDN_LDFLAGS | $SED -e 's/^-L//'` else dnl pkg-config not available or provides no info IDN_LIBS="-lidn2" @@ -3385,7 +3550,7 @@ case "$OPT_H2" in ;; esac -curl_h2_msg="disabled (--with-nghttp2)" +curl_h2_msg="no (--with-nghttp2)" if test X"$want_h2" != Xno; then dnl backup the pre-nghttp2 variables CLEANLDFLAGS="$LDFLAGS" @@ -3411,9 +3576,9 @@ if test X"$want_h2" != Xno; then CPPFLAGS="$CPPFLAGS $CPP_H2" LIBS="$LIB_H2 $LIBS" - # use nghttp2_option_set_no_recv_client_magic to require nghttp2 - # >= 1.0.0 - AC_CHECK_LIB(nghttp2, nghttp2_option_set_no_recv_client_magic, + # use nghttp2_session_set_local_window_size to require nghttp2 + # >= 1.12.0 + AC_CHECK_LIB(nghttp2, nghttp2_session_set_local_window_size, [ AC_CHECK_HEADERS(nghttp2/nghttp2.h, curl_h2_msg="enabled (nghttp2)" @@ -3440,11 +3605,392 @@ if test X"$want_h2" != Xno; then fi dnl ********************************************************************** -dnl Check for zsh completion path +dnl Check for ngtcp2 (QUIC) dnl ********************************************************************** -OPT_ZSH_FPATH=default -AC_ARG_WITH(zsh-functions-dir, +OPT_TCP2="yes" +curl_h3_msg="no (--with-ngtcp2, --with-quiche)" + +if test "x$disable_http" = "xyes"; then + # without HTTP, ngtcp2 is no use + OPT_TCP2="no" +fi + +AC_ARG_WITH(ngtcp2, +AC_HELP_STRING([--with-ngtcp2=PATH],[Enable ngtcp2 usage]) +AC_HELP_STRING([--without-ngtcp2],[Disable ngtcp2 usage]), + [OPT_TCP2=$withval]) +case "$OPT_TCP2" in + no) + dnl --without-ngtcp2 option used + want_tcp2="no" + ;; + yes) + dnl --with-ngtcp2 option used without path + want_tcp2="default" + want_tcp2_path="" + ;; + *) + dnl --with-ngtcp2 option used with path + want_tcp2="yes" + want_tcp2_path="$withval/lib/pkgconfig" + ;; +esac + +curl_tcp2_msg="no (--with-ngtcp2)" +if test X"$want_tcp2" != Xno; then + dnl backup the pre-ngtcp2 variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + CURL_CHECK_PKGCONFIG(libngtcp2, $want_tcp2_path) + + if test "$PKGCONFIG" != "no" ; then + LIB_TCP2=`CURL_EXPORT_PCDIR([$want_tcp2_path]) + $PKGCONFIG --libs-only-l libngtcp2` + AC_MSG_NOTICE([-l is $LIB_TCP2]) + + CPP_TCP2=`CURL_EXPORT_PCDIR([$want_tcp2_path]) dnl + $PKGCONFIG --cflags-only-I libngtcp2` + AC_MSG_NOTICE([-I is $CPP_TCP2]) + + LD_TCP2=`CURL_EXPORT_PCDIR([$want_tcp2_path]) + $PKGCONFIG --libs-only-L libngtcp2` + AC_MSG_NOTICE([-L is $LD_TCP2]) + + LDFLAGS="$LDFLAGS $LD_TCP2" + CPPFLAGS="$CPPFLAGS $CPP_TCP2" + LIBS="$LIB_TCP2 $LIBS" + + if 
test "x$cross_compiling" != "xyes"; then + DIR_TCP2=`echo $LD_TCP2 | $SED -e 's/^-L//'` + fi + AC_CHECK_LIB(ngtcp2, ngtcp2_conn_client_new, + [ + AC_CHECK_HEADERS(ngtcp2/ngtcp2.h, + NGTCP2_ENABLED=1 + AC_DEFINE(USE_NGTCP2, 1, [if ngtcp2 is in use]) + AC_SUBST(USE_NGTCP2, [1]) + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_TCP2" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $DIR_TCP2 to CURL_LIBRARY_PATH]) + ) + ], + dnl not found, revert back to clean variables + LDFLAGS=$CLEANLDFLAGS + CPPFLAGS=$CLEANCPPFLAGS + LIBS=$CLEANLIBS + ) + + else + dnl no ngtcp2 pkg-config found, deal with it + if test X"$want_tcp2" != Xdefault; then + dnl To avoid link errors, we do not allow --with-ngtcp2 without + dnl a pkgconfig file + AC_MSG_ERROR([--with-ngtcp2 was specified but could not find ngtcp2 pkg-config file.]) + fi + fi + +fi + +if test "x$NGTCP2_ENABLED" = "x1" -a "x$OPENSSL_ENABLED" = "x1"; then + dnl backup the pre-ngtcp2_crypto_openssl variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + CURL_CHECK_PKGCONFIG(libngtcp2_crypto_openssl, $want_tcp2_path) + + if test "$PKGCONFIG" != "no" ; then + LIB_NGTCP2_CRYPTO_OPENSSL=`CURL_EXPORT_PCDIR([$want_tcp2_path]) + $PKGCONFIG --libs-only-l libngtcp2_crypto_openssl` + AC_MSG_NOTICE([-l is $LIB_NGTCP2_CRYPTO_OPENSSL]) + + CPP_NGTCP2_CRYPTO_OPENSSL=`CURL_EXPORT_PCDIR([$want_tcp2_path]) dnl + $PKGCONFIG --cflags-only-I libngtcp2_crypto_openssl` + AC_MSG_NOTICE([-I is $CPP_NGTCP2_CRYPTO_OPENSSL]) + + LD_NGTCP2_CRYPTO_OPENSSL=`CURL_EXPORT_PCDIR([$want_tcp2_path]) + $PKGCONFIG --libs-only-L libngtcp2_crypto_openssl` + AC_MSG_NOTICE([-L is $LD_NGTCP2_CRYPTO_OPENSSL]) + + LDFLAGS="$LDFLAGS $LD_NGTCP2_CRYPTO_OPENSSL" + CPPFLAGS="$CPPFLAGS $CPP_NGTCP2_CRYPTO_OPENSSL" + LIBS="$LIB_NGTCP2_CRYPTO_OPENSSL $LIBS" + + if test "x$cross_compiling" != "xyes"; then + DIR_NGTCP2_CRYPTO_OPENSSL=`echo $LD_NGTCP2_CRYPTO_OPENSSL | $SED -e 's/^-L//'` + fi + AC_CHECK_LIB(ngtcp2_crypto_openssl, ngtcp2_crypto_ctx_initial, + [ + AC_CHECK_HEADERS(ngtcp2/ngtcp2_crypto.h, + NGTCP2_ENABLED=1 + AC_DEFINE(USE_NGTCP2_CRYPTO_OPENSSL, 1, [if ngtcp2_crypto_openssl is in use]) + AC_SUBST(USE_NGTCP2_CRYPTO_OPENSSL, [1]) + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_NGTCP2_CRYPTO_OPENSSL" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $DIR_NGTCP2_CRYPTO_OPENSSL to CURL_LIBRARY_PATH]) + ) + ], + dnl not found, revert back to clean variables + LDFLAGS=$CLEANLDFLAGS + CPPFLAGS=$CLEANCPPFLAGS + LIBS=$CLEANLIBS + ) + + else + dnl no ngtcp2_crypto_openssl pkg-config found, deal with it + if test X"$want_tcp2" != Xdefault; then + dnl To avoid link errors, we do not allow --with-ngtcp2 without + dnl a pkgconfig file + AC_MSG_ERROR([--with-ngtcp2 was specified but could not find ngtcp2_crypto_openssl pkg-config file.]) + fi + fi +fi + +if test "x$NGTCP2_ENABLED" = "x1" -a "x$GNUTLS_ENABLED" = "x1"; then + dnl backup the pre-ngtcp2_crypto_gnutls variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + CURL_CHECK_PKGCONFIG(libngtcp2_crypto_gnutls, $want_tcp2_path) + + if test "$PKGCONFIG" != "no" ; then + LIB_NGTCP2_CRYPTO_GNUTLS=`CURL_EXPORT_PCDIR([$want_tcp2_path]) + $PKGCONFIG --libs-only-l libngtcp2_crypto_gnutls` + AC_MSG_NOTICE([-l is $LIB_NGTCP2_CRYPTO_GNUTLS]) + + CPP_NGTCP2_CRYPTO_GNUTLS=`CURL_EXPORT_PCDIR([$want_tcp2_path]) dnl + $PKGCONFIG --cflags-only-I libngtcp2_crypto_gnutls` + AC_MSG_NOTICE([-I is $CPP_NGTCP2_CRYPTO_GNUTLS]) + + LD_NGTCP2_CRYPTO_GNUTLS=`CURL_EXPORT_PCDIR([$want_tcp2_path]) + $PKGCONFIG --libs-only-L 
libngtcp2_crypto_gnutls` + AC_MSG_NOTICE([-L is $LD_NGTCP2_CRYPTO_GNUTLS]) + + LDFLAGS="$LDFLAGS $LD_NGTCP2_CRYPTO_GNUTLS" + CPPFLAGS="$CPPFLAGS $CPP_NGTCP2_CRYPTO_GNUTLS" + LIBS="$LIB_NGTCP2_CRYPTO_GNUTLS $LIBS" + + if test "x$cross_compiling" != "xyes"; then + DIR_NGTCP2_CRYPTO_GNUTLS=`echo $LD_NGTCP2_CRYPTO_GNUTLS | $SED -e 's/^-L//'` + fi + AC_CHECK_LIB(ngtcp2_crypto_gnutls, ngtcp2_crypto_ctx_initial, + [ + AC_CHECK_HEADERS(ngtcp2/ngtcp2_crypto.h, + NGTCP2_ENABLED=1 + AC_DEFINE(USE_NGTCP2_CRYPTO_GNUTLS, 1, [if ngtcp2_crypto_gnutls is in use]) + AC_SUBST(USE_NGTCP2_CRYPTO_GNUTLS, [1]) + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_NGTCP2_CRYPTO_GNUTLS" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $DIR_NGTCP2_CRYPTO_GNUTLS to CURL_LIBRARY_PATH]) + ) + ], + dnl not found, revert back to clean variables + LDFLAGS=$CLEANLDFLAGS + CPPFLAGS=$CLEANCPPFLAGS + LIBS=$CLEANLIBS + ) + + else + dnl no ngtcp2_crypto_gnutls pkg-config found, deal with it + if test X"$want_tcp2" != Xdefault; then + dnl To avoid link errors, we do not allow --with-ngtcp2 without + dnl a pkgconfig file + AC_MSG_ERROR([--with-ngtcp2 was specified but could not find ngtcp2_crypto_gnutls pkg-config file.]) + fi + fi +fi + +dnl ********************************************************************** +dnl Check for nghttp3 (HTTP/3 with ngtcp2) +dnl ********************************************************************** + +OPT_NGHTTP3="yes" + +if test "x$NGTCP2_ENABLED" = "x"; then + # without ngtcp2, nghttp3 is of no use for us + OPT_NGHTTP3="no" +fi + +AC_ARG_WITH(nghttp3, +AC_HELP_STRING([--with-nghttp3=PATH],[Enable nghttp3 usage]) +AC_HELP_STRING([--without-nghttp3],[Disable nghttp3 usage]), + [OPT_NGHTTP3=$withval]) +case "$OPT_NGHTTP3" in + no) + dnl --without-nghttp3 option used + want_nghttp3="no" + ;; + yes) + dnl --with-nghttp3 option used without path + want_nghttp3="default" + want_nghttp3_path="" + ;; + *) + dnl --with-nghttp3 option used with path + want_nghttp3="yes" + want_nghttp3_path="$withval/lib/pkgconfig" + ;; +esac + +curl_http3_msg="no (--with-nghttp3)" +if test X"$want_nghttp3" != Xno; then + dnl backup the pre-nghttp3 variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + CURL_CHECK_PKGCONFIG(libnghttp3, $want_nghttp3_path) + + if test "$PKGCONFIG" != "no" ; then + LIB_NGHTTP3=`CURL_EXPORT_PCDIR([$want_nghttp3_path]) + $PKGCONFIG --libs-only-l libnghttp3` + AC_MSG_NOTICE([-l is $LIB_NGHTTP3]) + + CPP_NGHTTP3=`CURL_EXPORT_PCDIR([$want_nghttp3_path]) dnl + $PKGCONFIG --cflags-only-I libnghttp3` + AC_MSG_NOTICE([-I is $CPP_NGHTTP3]) + + LD_NGHTTP3=`CURL_EXPORT_PCDIR([$want_nghttp3_path]) + $PKGCONFIG --libs-only-L libnghttp3` + AC_MSG_NOTICE([-L is $LD_NGHTTP3]) + + LDFLAGS="$LDFLAGS $LD_NGHTTP3" + CPPFLAGS="$CPPFLAGS $CPP_NGHTTP3" + LIBS="$LIB_NGHTTP3 $LIBS" + + if test "x$cross_compiling" != "xyes"; then + DIR_NGHTTP3=`echo $LD_NGHTTP3 | $SED -e 's/^-L//'` + fi + AC_CHECK_LIB(nghttp3, nghttp3_conn_client_new, + [ + AC_CHECK_HEADERS(nghttp3/nghttp3.h, + curl_h3_msg="enabled (ngtcp2 + nghttp3)" + NGHTTP3_ENABLED=1 + AC_DEFINE(USE_NGHTTP3, 1, [if nghttp3 is in use]) + AC_SUBST(USE_NGHTTP3, [1]) + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_NGHTTP3" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $DIR_NGHTTP3 to CURL_LIBRARY_PATH]) + experimental="$experimental HTTP3" + ) + ], + dnl not found, revert back to clean variables + LDFLAGS=$CLEANLDFLAGS + CPPFLAGS=$CLEANCPPFLAGS + LIBS=$CLEANLIBS + ) + + else + dnl no nghttp3 pkg-config found, deal with it + if test 
X"$want_nghttp3" != Xdefault; then + dnl To avoid link errors, we do not allow --with-nghttp3 without + dnl a pkgconfig file + AC_MSG_ERROR([--with-nghttp3 was specified but could not find nghttp3 pkg-config file.]) + fi + fi + +fi + +dnl ********************************************************************** +dnl Check for quiche (QUIC) +dnl ********************************************************************** + +OPT_QUICHE="yes" + +if test "x$disable_http" = "xyes" -o "x$USE_NGTCP" = "x1"; then + # without HTTP or with ngtcp2, quiche is no use + OPT_QUICHE="no" +fi + +AC_ARG_WITH(quiche, +AC_HELP_STRING([--with-quiche=PATH],[Enable quiche usage]) +AC_HELP_STRING([--without-quiche],[Disable quiche usage]), + [OPT_QUICHE=$withval]) +case "$OPT_QUICHE" in + no) + dnl --without-quiche option used + want_quiche="no" + ;; + yes) + dnl --with-quiche option used without path + want_quiche="default" + want_quiche_path="" + ;; + *) + dnl --with-quiche option used with path + want_quiche="yes" + want_quiche_path="$withval" + ;; +esac + +if test X"$want_quiche" != Xno; then + dnl backup the pre-quiche variables + CLEANLDFLAGS="$LDFLAGS" + CLEANCPPFLAGS="$CPPFLAGS" + CLEANLIBS="$LIBS" + + CURL_CHECK_PKGCONFIG(quiche, $want_quiche_path) + + if test "$PKGCONFIG" != "no" ; then + LIB_QUICHE=`CURL_EXPORT_PCDIR([$want_quiche_path]) + $PKGCONFIG --libs-only-l quiche` + AC_MSG_NOTICE([-l is $LIB_QUICHE]) + + CPP_QUICHE=`CURL_EXPORT_PCDIR([$want_quiche_path]) dnl + $PKGCONFIG --cflags-only-I quiche` + AC_MSG_NOTICE([-I is $CPP_QUICHE]) + + LD_QUICHE=`CURL_EXPORT_PCDIR([$want_quiche_path]) + $PKGCONFIG --libs-only-L quiche` + AC_MSG_NOTICE([-L is $LD_QUICHE]) + + LDFLAGS="$LDFLAGS $LD_QUICHE" + CPPFLAGS="$CPPFLAGS $CPP_QUICHE" + LIBS="$LIB_QUICHE $LIBS" + + if test "x$cross_compiling" != "xyes"; then + DIR_QUICHE=`echo $LD_QUICHE | $SED -e 's/^-L//'` + fi + AC_CHECK_LIB(quiche, quiche_connect, + [ + AC_CHECK_HEADERS(quiche.h, + experimental="$experimental HTTP3" + AC_MSG_NOTICE([HTTP3 support is experimental]) + curl_h3_msg="enabled (quiche)" + QUICHE_ENABLED=1 + AC_DEFINE(USE_QUICHE, 1, [if quiche is in use]) + AC_SUBST(USE_QUICHE, [1]) + AC_CHECK_FUNCS([quiche_conn_set_qlog_fd]) + CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$DIR_QUICHE" + export CURL_LIBRARY_PATH + AC_MSG_NOTICE([Added $DIR_QUICHE to CURL_LIBRARY_PATH]), + ) + ], + dnl not found, revert back to clean variables + LDFLAGS=$CLEANLDFLAGS + CPPFLAGS=$CLEANCPPFLAGS + LIBS=$CLEANLIBS + ) + else + dnl no quiche pkg-config found, deal with it + if test X"$want_quiche" != Xdefault; then + dnl To avoid link errors, we do not allow --with-quiche without + dnl a pkgconfig file + AC_MSG_ERROR([--with-quiche was specified but could not find quiche pkg-config file.]) + fi + fi +fi + +dnl ********************************************************************** +dnl Check for zsh completion path +dnl ********************************************************************** + +OPT_ZSH_FPATH=default +AC_ARG_WITH(zsh-functions-dir, AC_HELP_STRING([--with-zsh-functions-dir=PATH],[Install zsh completions to PATH]) AC_HELP_STRING([--without-zsh-functions-dir],[Do not install zsh completions]), [OPT_ZSH_FPATH=$withval]) @@ -3563,6 +4109,8 @@ dnl default includes #endif #ifdef HAVE_SYS_SELECT_H #include +#elif defined(HAVE_UNISTD_H) +#include #endif #ifdef HAVE_SYS_SOCKET_H #include @@ -3661,6 +4209,19 @@ AC_CHECK_TYPE(sa_family_t, #endif ]) +# check for suseconds_t +AC_CHECK_TYPE([suseconds_t],[ + AC_DEFINE(HAVE_SUSECONDS_T, 1, + [Define to 1 if suseconds_t is an available 
type.]) +], ,[ +#ifdef HAVE_SYS_TYPES_H +#include +#endif +#ifdef HAVE_SYS_TIME_H +#include +#endif +]) + AC_MSG_CHECKING([if time_t is unsigned]) CURL_RUN_IFELSE( [ @@ -3711,6 +4272,9 @@ CURL_CHECK_FUNC_GETHOSTBYADDR_R CURL_CHECK_FUNC_GETHOSTBYNAME CURL_CHECK_FUNC_GETHOSTBYNAME_R CURL_CHECK_FUNC_GETHOSTNAME +CURL_CHECK_FUNC_GETPEERNAME +CURL_CHECK_FUNC_GETSOCKNAME +CURL_CHECK_FUNC_IF_NAMETOINDEX CURL_CHECK_FUNC_GETIFADDRS CURL_CHECK_FUNC_GETSERVBYPORT_R CURL_CHECK_FUNC_GMTIME_R @@ -3770,6 +4334,7 @@ AC_CHECK_FUNCS([fnmatch \ setlocale \ setmode \ setrlimit \ + usleep \ utime \ utimes ],[ @@ -3794,14 +4359,6 @@ AC_CHECK_FUNCS([fnmatch \ fi ]) -if test "$ipv6" = "yes"; then - if test "$curl_cv_func_getaddrinfo" = "yes"; then - AC_DEFINE(ENABLE_IPV6, 1, [Define if you want to enable IPv6 support]) - IPV6_ENABLED=1 - AC_SUBST(IPV6_ENABLED) - fi -fi - CURL_CHECK_NONBLOCKING_SOCKET dnl ************************************************************ @@ -3918,9 +4475,17 @@ if test "$want_pthreads" != "no"; then AC_CHECK_HEADER(pthread.h, [ AC_DEFINE(HAVE_PTHREAD_H, 1, [if you have ]) save_CFLAGS="$CFLAGS" - - dnl first check for function without lib + dnl When statically linking against boringssl, -lpthread is added to LIBS. + dnl Make sure to that this does not pass the check below, we really want + dnl -pthread in CFLAGS as recommended for GCC. This also ensures that + dnl lib1541 and lib1565 tests are built with these options. Otherwise + dnl they fail the build since tests/libtest/Makefile.am clears LIBS. + save_LIBS="$LIBS" + + LIBS= + dnl Check for libc variants without a separate pthread lib like bionic AC_CHECK_FUNC(pthread_create, [USE_THREADS_POSIX=1] ) + LIBS="$save_LIBS" dnl on HPUX, life is more complicated... case $host in @@ -3964,6 +4529,8 @@ if test "$want_thres" = "yes" && test "x$USE_THREADS_POSIX" != "x1"; then fi fi +CURL_CONVERT_INCLUDE_TO_ISYSTEM + dnl ************************************************************ dnl disable verbose text strings dnl @@ -4003,16 +4570,16 @@ AC_HELP_STRING([--disable-sspi],[Disable SSPI]), fi ;; *) - if test "x$WINSSL_ENABLED" = "x1"; then - # --with-winssl implies --enable-sspi + if test "x$SCHANNEL_ENABLED" = "x1"; then + # --with-schannel implies --enable-sspi AC_MSG_RESULT(yes) else AC_MSG_RESULT(no) fi ;; esac ], - if test "x$WINSSL_ENABLED" = "x1"; then - # --with-winssl implies --enable-sspi + if test "x$SCHANNEL_ENABLED" = "x1"; then + # --with-schannel implies --enable-sspi AC_MSG_RESULT(yes) else AC_MSG_RESULT(no) @@ -4052,7 +4619,6 @@ AC_HELP_STRING([--disable-tls-srp],[Disable TLS-SRP authentication]), [ case "$enableval" in no) AC_MSG_RESULT(no) - AC_DEFINE(CURL_DISABLE_TLS_SRP, 1, [to disable TLS-SRP authentication]) want_tls_srp=no ;; *) AC_MSG_RESULT(yes) @@ -4105,7 +4671,7 @@ fi dnl ************************************************************ dnl disable cookies support dnl -AC_MSG_CHECKING([whether to enable support for cookies]) +AC_MSG_CHECKING([whether to support cookies]) AC_ARG_ENABLE(cookies, AC_HELP_STRING([--enable-cookies],[Enable cookies support]) AC_HELP_STRING([--disable-cookies],[Disable cookies support]), @@ -4120,6 +4686,168 @@ AC_HELP_STRING([--disable-cookies],[Disable cookies support]), AC_MSG_RESULT(yes) ) +dnl ************************************************************ +dnl disable socketpair +dnl +AC_MSG_CHECKING([whether to support socketpair]) +AC_ARG_ENABLE(socketpair, +AC_HELP_STRING([--enable-socketpair],[Enable socketpair support]) +AC_HELP_STRING([--disable-socketpair],[Disable socketpair support]), 
+[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_SOCKETPAIR, 1, [to disable socketpair support]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable HTTP authentication support +dnl +AC_MSG_CHECKING([whether to support HTTP authentication]) +AC_ARG_ENABLE(http-auth, +AC_HELP_STRING([--enable-http-auth],[Enable HTTP authentication support]) +AC_HELP_STRING([--disable-http-auth],[Disable HTTP authentication support]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_HTTP_AUTH, 1, [disable HTTP authentication]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable DoH support +dnl +AC_MSG_CHECKING([whether to support DoH]) +AC_ARG_ENABLE(doh, +AC_HELP_STRING([--enable-doh],[Enable DoH support]) +AC_HELP_STRING([--disable-doh],[Disable DoH support]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_DOH, 1, [disable DoH]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable mime API support +dnl +AC_MSG_CHECKING([whether to support the MIME API]) +AC_ARG_ENABLE(mime, +AC_HELP_STRING([--enable-mime],[Enable mime API support]) +AC_HELP_STRING([--disable-mime],[Disable mime API support]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_MIME, 1, [disable mime API]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable date parsing +dnl +AC_MSG_CHECKING([whether to support date parsing]) +AC_ARG_ENABLE(dateparse, +AC_HELP_STRING([--enable-dateparse],[Enable date parsing]) +AC_HELP_STRING([--disable-dateparse],[Disable date parsing]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_PARSEDATE, 1, [disable date parsing]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable netrc +dnl +AC_MSG_CHECKING([whether to support netrc parsing]) +AC_ARG_ENABLE(netrc, +AC_HELP_STRING([--enable-netrc],[Enable netrc parsing]) +AC_HELP_STRING([--disable-netrc],[Disable netrc parsing]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_NETRC, 1, [disable netrc parsing]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable progress-meter +dnl +AC_MSG_CHECKING([whether to support progress-meter]) +AC_ARG_ENABLE(progress-meter, +AC_HELP_STRING([--enable-progress-meter],[Enable progress-meter]) +AC_HELP_STRING([--disable-progress-meter],[Disable progress-meter]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_PROGRESS_METER, 1, [disable progress-meter]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable shuffle DNS support +dnl +AC_MSG_CHECKING([whether to support DNS shuffling]) +AC_ARG_ENABLE(dnsshuffle, +AC_HELP_STRING([--enable-dnsshuffle],[Enable DNS shuffling]) +AC_HELP_STRING([--disable-dnsshuffle],[Disable DNS shuffling]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_SHUFFLE_DNS, 1, [disable DNS shuffling]) + ;; + *) AC_MSG_RESULT(yes) + ;; + 
esac ], + AC_MSG_RESULT(yes) +) + +dnl ************************************************************ +dnl disable the curl_easy_options API +dnl +AC_MSG_CHECKING([whether to support curl_easy_option*]) +AC_ARG_ENABLE(get-easy-option, +AC_HELP_STRING([--enable-get-easy-options],[Enable curl_easy_options]) +AC_HELP_STRING([--disable-get-easy-options],[Disable curl_easy_options]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + AC_DEFINE(CURL_DISABLE_GETOPTIONS, 1, [to disable curl_easy_options]) + ;; + *) AC_MSG_RESULT(yes) + ;; + esac ], + AC_MSG_RESULT(yes) +) + dnl ************************************************************ dnl switch on/off alt-svc dnl @@ -4135,7 +4863,6 @@ AC_HELP_STRING([--disable-alt-svc],[Disable alt-svc support]), *) AC_MSG_RESULT(yes) curl_altsvc_msg="enabled"; enable_altsvc="yes" - experimental="alt-svc" ;; esac ], AC_MSG_RESULT(no) @@ -4143,7 +4870,37 @@ AC_HELP_STRING([--disable-alt-svc],[Disable alt-svc support]), if test "$enable_altsvc" = "yes"; then AC_DEFINE(USE_ALTSVC, 1, [to enable alt-svc]) - experimental="alt-svc" + experimental="$experimental alt-svc" +fi + +dnl ************************************************************* +dnl check whether ECH support, if desired, is actually available +dnl +if test "x$want_ech" != "xno"; then + AC_MSG_CHECKING([whether ECH support is available]) + + dnl assume NOT and look for sufficient condition + ECH_ENABLED=0 + ECH_SUPPORT='' + + dnl OpenSSL with a chosen ECH function should be enough + dnl so more exhaustive checking seems unnecessary for now + if test "x$OPENSSL_ENABLED" = "x1"; then + AC_CHECK_FUNCS(SSL_get_ech_status, + ECH_SUPPORT="ECH support available (OpenSSL with SSL_get_ech_status)" + ECH_ENABLED=1) + + dnl add 'elif' chain here for additional implementations + fi + + dnl now deal with whatever we found + if test "x$ECH_ENABLED" = "x1"; then + AC_DEFINE(USE_ECH, 1, [if ECH support is available]) + AC_MSG_RESULT($ECH_SUPPORT) + experimental="$experimental ECH" + else + AC_MSG_ERROR([--enable-ech ignored: No ECH support found]) + fi fi dnl ************************************************************ @@ -4177,6 +4934,13 @@ dnl to let curl-config output the static libraries correctly ENABLE_STATIC="$enable_static" AC_SUBST(ENABLE_STATIC) +dnl merge the pkg-config Libs.private field into Libs when static-only +if test "x$enable_shared" = "xno"; then + LIBCURL_NO_SHARED=$LIBCURL_LIBS +else + LIBCURL_NO_SHARED= +fi +AC_SUBST(LIBCURL_NO_SHARED) dnl dnl For keeping supported features and protocols also in pkg-config file @@ -4200,6 +4964,9 @@ fi if test "x$HAVE_BROTLI" = "x1"; then SUPPORT_FEATURES="$SUPPORT_FEATURES brotli" fi +if test "x$HAVE_ZSTD" = "x1"; then + SUPPORT_FEATURES="$SUPPORT_FEATURES zstd" +fi if test "x$USE_ARES" = "x1" -o "x$USE_THREADS_POSIX" = "x1" \ -o "x$USE_THREADS_WIN32" = "x1"; then SUPPORT_FEATURES="$SUPPORT_FEATURES AsynchDNS" @@ -4236,7 +5003,8 @@ fi if test "x$CURL_DISABLE_CRYPTO_AUTH" != "x1"; then if test "x$OPENSSL_ENABLED" = "x1" -o "x$USE_WINDOWS_SSPI" = "x1" \ -o "x$GNUTLS_ENABLED" = "x1" -o "x$MBEDTLS_ENABLED" = "x1" \ - -o "x$NSS_ENABLED" = "x1" -o "x$SECURETRANSPORT_ENABLED" = "x1"; then + -o "x$NSS_ENABLED" = "x1" -o "x$SECURETRANSPORT_ENABLED" = "x1" \ + -o "x$WOLFSSL_NTLM" = "x1"; then SUPPORT_FEATURES="$SUPPORT_FEATURES NTLM" if test "x$CURL_DISABLE_HTTP" != "x1" -a \ @@ -4254,15 +5022,30 @@ if test "x$USE_NGHTTP2" = "x1"; then SUPPORT_FEATURES="$SUPPORT_FEATURES HTTP2" fi +if test "x$USE_NGTCP2" = "x1" -o "x$USE_QUICHE" = "x1"; then + 
SUPPORT_FEATURES="$SUPPORT_FEATURES HTTP3" +fi + if test "x$CURL_WITH_MULTI_SSL" = "x1"; then SUPPORT_FEATURES="$SUPPORT_FEATURES MultiSSL" fi -if test "x$OPENSSL_ENABLED" = "x1" -o "x$GNUTLS_ENABLED" = "x1" \ - -o "x$NSS_ENABLED" = "x1"; then - SUPPORT_FEATURES="$SUPPORT_FEATURES HTTPS-proxy" +dnl if not explictily turned off, HTTPS-proxy comes with some TLS backends +if test "x$https_proxy" != "xno"; then + if test "x$OPENSSL_ENABLED" = "x1" -o "x$GNUTLS_ENABLED" = "x1" \ + -o "x$NSS_ENABLED" = "x1"; then + SUPPORT_FEATURES="$SUPPORT_FEATURES HTTPS-proxy" + fi fi +if test "x$ECH_ENABLED" = "x1"; then + SUPPORT_FEATURES="$SUPPORT_FEATURES ECH" +fi + +dnl replace spaces with newlines +dnl sort the lines +dnl replace the newlines back to spaces +SUPPORT_FEATURES=`echo $SUPPORT_FEATURES | tr ' ' '\012' | sort | tr '\012' ' '` AC_SUBST(SUPPORT_FEATURES) dnl For supported protocols in pkg-config file @@ -4302,6 +5085,9 @@ fi if test "x$CURL_DISABLE_GOPHER" != "x1"; then SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS GOPHER" fi +if test "x$CURL_DISABLE_MQTT" != "x1"; then + SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS MQTT" +fi if test "x$CURL_DISABLE_POP3" != "x1"; then SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS POP3" if test "x$SSL_ENABLED" = "x1"; then @@ -4316,9 +5102,10 @@ if test "x$CURL_DISABLE_IMAP" != "x1"; then fi if test "x$CURL_DISABLE_SMB" != "x1" \ -a "x$CURL_DISABLE_CRYPTO_AUTH" != "x1" \ - -a \( "x$OPENSSL_ENABLED" = "x1" -o "x$USE_WINDOWS_SSPI" = "x1" \ + -a \( "x$OPENSSL_ENABLED" = "x1" \ -o "x$GNUTLS_ENABLED" = "x1" -o "x$MBEDTLS_ENABLED" = "x1" \ - -o "x$NSS_ENABLED" = "x1" -o "x$SECURETRANSPORT_ENABLED" = "x1" \); then + -o "x$NSS_ENABLED" = "x1" -o "x$SECURETRANSPORT_ENABLED" = "x1" \ + -o "x$WOLFSSL_NTLM" = "x1" \); then SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS SMB" if test "x$SSL_ENABLED" = "x1"; then SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS SMBS" @@ -4338,6 +5125,9 @@ if test "x$USE_LIBSSH" = "x1"; then SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS SCP" SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS SFTP" fi +if test "x$USE_WOLFSSH" = "x1"; then + SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS SFTP" +fi if test "x$CURL_DISABLE_RTSP" != "x1"; then SUPPORT_PROTOCOLS="$SUPPORT_PROTOCOLS RTSP" fi @@ -4422,6 +5212,7 @@ AC_MSG_NOTICE([Configured to build curl/libcurl: SSH: ${curl_ssh_msg} zlib: ${curl_zlib_msg} brotli: ${curl_brotli_msg} + zstd: ${curl_zstd_msg} GSS-API: ${curl_gss_msg} TLS-SRP: ${curl_tls_srp_msg} resolver: ${curl_res_msg} @@ -4445,11 +5236,13 @@ AC_MSG_NOTICE([Configured to build curl/libcurl: PSL: ${curl_psl_msg} Alt-svc: ${curl_altsvc_msg} HTTP2: ${curl_h2_msg} + HTTP3: ${curl_h3_msg} + ECH: ${curl_ech_msg} Protocols: ${SUPPORT_PROTOCOLS} Features: ${SUPPORT_FEATURES} ]) if test -n "$experimental"; then cat >&2 << _EOF - WARNING: $experimental is enabled but marked EXPERIMENTAL. Use with caution! + WARNING: $experimental enabled but marked EXPERIMENTAL. Use with caution! _EOF fi diff --git a/curl-config.in b/curl-config.in index 0a7e0353fab2b3..18b1fc1e6b81bc 100644 --- a/curl-config.in +++ b/curl-config.in @@ -6,7 +6,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 2001 - 2018, Daniel Stenberg, , et al. +# Copyright (C) 2001 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. 
The terms @@ -160,7 +160,7 @@ while test $# -gt 0; do else CURLLIBDIR="" fi - if test "X@REQUIRE_LIB_DEPS@" = "Xyes"; then + if test "X@ENABLE_SHARED@" = "Xno"; then echo ${CURLLIBDIR}-lcurl @LIBCURL_LIBS@ else echo ${CURLLIBDIR}-lcurl diff --git a/docs/ALTSVC.md b/docs/ALTSVC.md index 5aca1c95047b9f..6a462bbbc86b1f 100644 --- a/docs/ALTSVC.md +++ b/docs/ALTSVC.md @@ -2,21 +2,6 @@ curl features **EXPERIMENTAL** support for the Alt-Svc: HTTP header. -## Experimental - -Experimental support in curl means: - -1. Experimental features are provided to allow users to try them out and - provide feedback on functionality and API etc before they ship and get - "carved in stone". -2. You must enable the feature when invoking configure as otherwise curl will - not be built with the feature present. -3. We strongly advice against using this feature in production. -4. **We reserve the right to change behavior** of the feature without sticking - to our API/ABI rules as we do for regular features, as long as it is marked - experimental. -5. Experimental features are clearly marked so in documentation. Beware. - ## Enable Alt-Svc in build `./configure --enable-alt-svc` @@ -25,35 +10,30 @@ Experimental support in curl means: [RFC 7838](https://tools.ietf.org/html/rfc7838) -## What works - -- read alt-svc file from disk -- write alt-svc file from disk -- parse `Alt-Svc:` response headers, including `ma`, `clear` and `persist`. -- replaces old entries when new alternatives are received -- unit tests to verify most of this functionality (test 1654) -- act on `Alt-Svc:` response headers -- build conditionally on `configure --enable-alt-svc` only, feature marked as - **EXPERIMENTAL** -- implement `CURLOPT_ALTSVC_CTRL` -- implement `CURLOPT_ALTSVC` -- document `CURLOPT_ALTSVC_CTRL` -- document `CURLOPT_ALTSVC` -- document `--alt-svc` -- add `CURL_VERSION_ALTSVC` -- make `curl -V` show 'alt-svc' as a feature if built-in -- support `curl --alt-svc [file]` to enable caching, using that file -- make `tests/runtests.pl` able to filter tests on the feature `alt-svc` -- actually use the existing in-memory alt-svc cache for outgoing connections -- alt-svc cache expiry -- test 355 and 356 verify curl acting on Alt-Svc, received from header and - loaded from cache. The latter needs a debug build since it enables Alt-Svc - for plain HTTP. - -## What is left +# Alt-Svc cache file format + +This a text based file with one line per entry and each line consists of nine +space separated fields. + +## Example + + h2 quic.tech 8443 h3-22 quic.tech 8443 "20190808 06:18:37" 0 0 + +## Fields + +1. The ALPN id for the source origin +2. The host name for the source origin +3. The port number for the source origin +4. The ALPN id for the destination host +5. The host name for the destination host +6. The host number for the destination host +7. The expiration date and time of this entry within double quotes. The date format is "YYYYMMDD HH:MM:SS" and the time zone is GMT. +8. Boolean (1 or 0) if "persist" was set for this entry +9. 
Integer priority value (not currently used) + +# TODO - handle multiple response headers, when one of them says `clear` (should override them all) - using `Age:` value for caching age as per spec - `CURLALTSVC_IMMEDIATELY` support -- `CURLALTSVC_ALTUSED` support diff --git a/docs/BINDINGS.md b/docs/BINDINGS.md index b3624b1cb59754..d0e80b8ac6b1a4 100644 --- a/docs/BINDINGS.md +++ b/docs/BINDINGS.md @@ -23,6 +23,8 @@ Requests](https://github.com/whoshuu/cpr) by Huu Nguyen Cocoa: [BBHTTP](https://github.com/brunodecarvalho/BBHTTP) written by Bruno de Carvalho [curlhandle](https://github.com/karelia/curlhandle) Written by Dan Wood +Clojure: [clj-curl](https://github.com/lsevero/clj-curl) by Lucas Severo + [D](https://dlang.org/library/std/net/curl.html) Written by Kenneth Bogert [Delphi](https://github.com/Mercury13/curl4delphi) Written by Mikhail Merkuryev @@ -53,6 +55,8 @@ Go: [go-curl](https://github.com/andelf/go-curl) by ShuYu Wang [Julia](https://github.com/forio/Curl.jl) Written by Paul Howe +[Kapito](https://github.com/puzza007/katipo) is an Erlang HTTP library around libcurl. + [Lisp](https://common-lisp.net/project/cl-curl/) Written by Liam Healy Lua: [luacurl](http://luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](https://github.com/Lua-cURL) by Jürgen Hötzel @@ -61,6 +65,8 @@ Lua: [luacurl](http://luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](ht [.NET](https://sourceforge.net/projects/libcurl-net/) libcurl-net by Jeffrey Phillips +[Nim](https://nimble.directory/pkg/libcurl) wrapper for libcurl + [node.js](https://github.com/JCMais/node-libcurl) node-libcurl by Jonathan Cardoso Machado [Object-Pascal](https://web.archive.org/web/20020610214926/www.tekool.com/opcurl) Free Pascal, Delphi and Kylix binding written by Christophe Espern. @@ -69,14 +75,17 @@ Lua: [luacurl](http://luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](ht [Pascal](https://web.archive.org/web/20030804091414/houston.quik.com/jkp/curlpas/) Free Pascal, Delphi and Kylix binding written by Jeffrey Pohlmeyer. -Perl: [WWW--Curl](https://github.com/szbalint/WWW--Curl) Maintained by Cris +Perl: [WWW::Curl](https://github.com/szbalint/WWW--Curl) Maintained by Cris Bailiff and Bálint Szilakszi, [perl6-net-curl](https://github.com/azawawi/perl6-net-curl) by Ahmad M. Zawawi +[NET::Curl](https://metacpan.org/pod/Net::Curl) by Przemyslaw Iskra [PHP](https://php.net/curl) Originally written by Sterling Hughes [PostgreSQL](https://github.com/pramsey/pgsql-http) - HTTP client for PostgreSQL +[PureBasic](https://www.purebasic.com/documentation/http/index.html) uses libcurl in its "native" HTTP subsystem + [Python](http://pycurl.io/) PycURL by Kjetil Jacobsen [R](https://cran.r-project.org/package=curl) diff --git a/docs/BUG-BOUNTY.md b/docs/BUG-BOUNTY.md new file mode 100644 index 00000000000000..8ee9ac62f4184a --- /dev/null +++ b/docs/BUG-BOUNTY.md @@ -0,0 +1,106 @@ +# The curl bug bounty + +The curl project runs a bug bounty program in association with +[HackerOne](https://www.hackerone.com) and the [Internet Bug +Bounty](https://internetbugbounty.org). + +# How does it work? + +Start out by posting your suspected security vulnerability directly to [curl's +HackerOne program](https://hackerone.com/curl). + +After you have reported a security issue, it has been deemed credible, and a +patch and advisory has been made public, you may be eligible for a bounty from +this program. + +See all details at [https://hackerone.com/curl](https://hackerone.com/curl) + +This bounty is relying on funds from sponsors. 
If you use curl professionally, +consider help funding this! See +[https://opencollective.com/curl](https://opencollective.com/curl) for +details. + +# What are the reward amounts? + +The curl projects offer monetary compensation for reported and published +security vulnerabilities. The amount of money that is rewarded depends on how +serious the flaw is determined to be. + +We offer reward money *up to* a certain amount per severity. The curl security +team determines the severity of each reported flaw on a case by case basis and +the exact amount rewarded to the reporter is then decided. + +Check out the current award amounts at [https://hackerone.com/curl](https://hackerone.com/curl) + +# Who is eligible for a reward? + +Everyone and anyone who reports a security problem in a released curl version +that hasn't already been reported can ask for a bounty. + +Vulnerabilities in features that are off by default and documented as +experimental are not eligible for a reward. + +The vulnerability has to be fixed and publicly announced (by the curl project) +before a bug bounty will be considered. + +Bounties need to be requested within twelve months from the publication of the +vulnerability. + +The vulnerabilities must not have been made public before February 1st, 2019. +We do not retroactively pay for old, already known, or published security +problems. + +# Product vulnerabilities only + +This bug bounty only concerns the curl and libcurl products and thus their +respective source codes - when running on existing hardware. It does not +include documentation, websites, or other infrastructure. + +The curl security team will be the sole arbiter if a reported flaw can be +subject to a bounty or not. + +# How are vulnerabilities graded? + +The grading of each reported vulnerability that makes a reward claim will be +performed by the curl security team. The grading will be based on the CVSS +(Common Vulnerability Scoring System) 3.0. + +# How are reward amounts determined? + +The curl security team first gives the vulnerability a score, as mentioned +above, and based on that level we set an amount depending on the specifics of +the individual case. Other sponsors of the program might also get involved and +can raise the amounts depending on the particular issue. + +# What happens if the bounty fund is drained? + +The bounty fund depends on sponsors. If we pay out more bounties than we add, +the fund will eventually drain. If that end up happening, we will simply not +be able to pay out as high bounties as we would like and hope that we can +convince new sponsors to help us top up the fund again. + +# Regarding taxes, etc. on the bounties + +In the event that the individual receiving a curl bug bounty needs to pay +taxes on the reward money, the responsibility lies with the receiver. The +curl project or its security team never actually receive any of this money, +hold the money, or pay out the money. + +## Bonus levels + +In cooperation with [Dropbox](https://www.dropbox.com) the curl bug bounty can +offer the highest levels of rewards if the issue covers one of the interest +areas of theirs - and only if the bug is graded *high* or *critical*. A +non-exhaustive list of vulnerabilities Dropbox is interested in are: + + - RCE + - URL parsing vulnerabilities with demonstrable security impact + +Dropbox would generally hand out rewards for critical vulnerabilities ranging +from 12k-32k USD where RCE is on the upper end of the spectrum. 
+ +URL parsing vulnerabilities with demonstrable security impact might include +incorrectly determining the authority of a URL when a special character is +inserted into the path of the URL (as a hypothetical). This type of +vulnerability would likely yield 6k-12k unless further impact could be +demonstrated. diff --git a/docs/BUGS b/docs/BUGS deleted file mode 100644 index 7322d9b21e85f4..00000000000000 --- a/docs/BUGS +++ /dev/null @@ -1,297 +0,0 @@ - _ _ ____ _ - ___| | | | _ \| | - / __| | | | |_) | | - | (__| |_| | _ <| |___ - \___|\___/|_| \_\_____| - -BUGS - - 1. Bugs - 1.1 There are still bugs - 1.2 Where to report - 1.3 Security bugs - 1.4 What to report - 1.5 libcurl problems - 1.6 Who will fix the problems - 1.7 How to get a stack trace - 1.8 Bugs in libcurl bindings - 1.9 Bugs in old versions - - 2. Bug fixing procedure - 2.1 What happens on first filing - 2.2 First response - 2.3 Not reproducible - 2.4 Unresponsive - 2.5 Lack of time/interest - 2.6 KNOWN_BUGS - 2.7 TODO - 2.8 Closing off stalled bugs - -============================================================================== - -1.1 There are still bugs - - Curl and libcurl keep being developed. Adding features and changing code - means that bugs will sneak in, no matter how hard we try not to. - - Of course there are lots of bugs left. And lots of misfeatures. - - To help us make curl the stable and solid product we want it to be, we need - bug reports and bug fixes. - -1.2 Where to report - - If you can't fix a bug yourself and submit a fix for it, try to report an as - detailed report as possible to a curl mailing list to allow one of us to - have a go at a solution. You can optionally also post your bug/problem at - curl's bug tracking system over at - - https://github.com/curl/curl/issues - - Please read the rest of this document below first before doing that! - - If you feel you need to ask around first, find a suitable mailing list and - post there. The lists are available on https://curl.haxx.se/mail/ - -1.3 Security bugs - - If you find a bug or problem in curl or libcurl that you think has a - security impact, for example a bug that can put users in danger or make them - vulnerable if the bug becomes public knowledge, then please report that bug - using our security development process. - - Security related bugs or bugs that are suspected to have a security impact, - should be reported by email to curl-security@haxx.se so that they first can - be dealt with away from the public to minimize the harm and impact it will - have on existing users out there who might be using the vulnerable versions. - - The curl project's process for handling security related issues is - documented here: - - https://curl.haxx.se/dev/secprocess.html - -1.4 What to report - - When reporting a bug, you should include all information that will help us - understand what's wrong, what you expected to happen and how to repeat the - bad behavior. You therefore need to tell us: - - - your operating system's name and version number - - - what version of curl you're using (curl -V is fine) - - - versions of the used libraries that libcurl is built to use - - - what URL you were working with (if possible), at least which protocol - - and anything and everything else you think matters. Tell us what you - expected to happen, tell use what did happen, tell us how you could make it - work another way. Dig around, try out, test. Then include all the tiny bits - and pieces in your report. 
You will benefit from this yourself, as it will - enable us to help you quicker and more accurately. - - Since curl deals with networks, it often helps us if you include a protocol - debug dump with your bug report. The output you get by using the -v or - --trace options. - - If curl crashed, causing a core dump (in unix), there is hardly any use to - send that huge file to anyone of us. Unless we have an exact same system - setup as you, we can't do much with it. Instead we ask you to get a stack - trace and send that (much smaller) output to us instead! - - The address and how to subscribe to the mailing lists are detailed in the - MANUAL file. - -1.5 libcurl problems - - When you've written your own application with libcurl to perform transfers, - it is even more important to be specific and detailed when reporting bugs. - - Tell us the libcurl version and your operating system. Tell us the name and - version of all relevant sub-components like for example the SSL library - you're using and what name resolving your libcurl uses. If you use SFTP or - SCP, the libssh2 version is relevant etc. - - Showing us a real source code example repeating your problem is the best way - to get our attention and it will greatly increase our chances to understand - your problem and to work on a fix (if we agree it truly is a problem). - - Lots of problems that appear to be libcurl problems are actually just abuses - of the libcurl API or other malfunctions in your applications. It is advised - that you run your problematic program using a memory debug tool like - valgrind or similar before you post memory-related or "crashing" problems to - us. - -1.6 Who will fix the problems - - If the problems or bugs you describe are considered to be bugs, we want to - have the problems fixed. - - There are no developers in the curl project that are paid to work on bugs. - All developers that take on reported bugs do this on a voluntary basis. We - do it out of an ambition to keep curl and libcurl excellent products and out - of pride. - - But please do not assume that you can just lump over something to us and it - will then magically be fixed after some given time. Most often we need - feedback and help to understand what you've experienced and how to repeat a - problem. Then we may only be able to assist YOU to debug the problem and to - track down the proper fix. - - We get reports from many people every month and each report can take a - considerable amount of time to really go to the bottom with. - -1.7 How to get a stack trace - - First, you must make sure that you compile all sources with -g and that you - don't 'strip' the final executable. Try to avoid optimizing the code as - well, remove -O, -O2 etc from the compiler options. - - Run the program until it cores. - - Run your debugger on the core file, like ' curl core'. - should be replaced with the name of your debugger, in most cases that will - be 'gdb', but 'dbx' and others also occur. - - When the debugger has finished loading the core file and presents you a - prompt, enter 'where' (without the quotes) and press return. - - The list that is presented is the stack trace. If everything worked, it is - supposed to contain the chain of functions that were called when curl - crashed. Include the stack trace with your detailed bug report. It'll help a - lot. - -1.8 Bugs in libcurl bindings - - There will of course pop up bugs in libcurl bindings. 
You should then - primarily approach the team that works on that particular binding and see - what you can do to help them fix the problem. - - If you suspect that the problem exists in the underlying libcurl, then - please convert your program over to plain C and follow the steps outlined - above. - -1.9 Bugs in old versions - - The curl project typically releases new versions every other month, and we - fix several hundred bugs per year. For a huge table of releases, number of - bug fixes and more, see: https://curl.haxx.se/docs/releases.html - - The developers in the curl project do not have bandwidth or energy enough to - maintain several branches or to spend much time on hunting down problems in - old versions when chances are we already fixed them or at least that they've - changed nature and appearance in later versions. - - When you experience a problem and want to report it, you really SHOULD - include the version number of the curl you're using when you experience the - issue. If that version number shows us that you're using an out-of-date - curl, you should also try out a modern curl version to see if the problem - persists or how/if it has changed in appearance. - - Even if you cannot immediately upgrade your application/system to run the - latest curl version, you can most often at least run a test version or - experimental build or similar, to get this confirmed or not. - - At times people insist that they cannot upgrade to a modern curl version, - but instead they "just want the bug fixed". That's fine, just don't count on - us spending many cycles on trying to identify which single commit, if that's - even possible, that at some point in the past fixed the problem you're now - experiencing. - - Security wise, it is almost always a bad idea to lag behind the current curl - versions by a lot. We keeping discovering and reporting security problems - over time see you can see in this table: - https://curl.haxx.se/docs/vulnerabilities.html - -2. Bug fixing procedure - -2.1 What happens on first filing - - When a new issue is posted in the issue tracker or on the mailing list, the - team of developers first need to see the report. Maybe they took the day - off, maybe they're off in the woods hunting. Have patience. Allow at least a - few days before expecting someone to have responded. - - In the issue tracker you can expect that some labels will be set on the - issue to help categorize it. - -2.2 First response - - If your issue/bug report wasn't perfect at once (and few are), chances are - that someone will ask follow-up questions. Which version did you use? Which - options did you use? How often does the problem occur? How can we reproduce - this problem? Which protocols does it involve? Or perhaps much more specific - and deep diving questions. It all depends on your specific issue. - - You should then respond to these follow-up questions and provide more info - about the problem, so that we can help you figure it out. Or maybe you can - help us figure it out. An active back-and-forth communication is important - and the key for finding a cure and landing a fix. - -2.3 Not reproducible - - For problems that we can't reproduce and can't understand even after having - gotten all the info we need and having studied the source code over again, - are really hard to solve so then we may require further work from you who - actually see or experience the problem. 
- -2.4 Unresponsive - - If the problem haven't been understood or reproduced, and there's nobody - responding to follow-up questions or questions asking for clarifications or - for discussing possible ways to move forward with the task, we take that as - a strong suggestion that the bug is not important. - - Unimportant issues will be closed as inactive sooner or later as they can't - be fixed. The inactivity period (waiting for responses) should not be - shorter than two weeks but may extend months. - -2.5 Lack of time/interest - - Bugs that are filed and are understood can unfortunately end up in the - "nobody cares enough about it to work on it" category. Such bugs are - perfectly valid problems that *should* get fixed but apparently aren't. We - try to mark such bugs as "KNOWN_BUGS material" after a time of inactivity - and if no activity is noticed after yet some time those bugs are added to - KNOWN_BUGS and are closed in the issue tracker. - -2.6 KNOWN_BUGS - - This is a list of known bugs. Bugs we know exist and that have been pointed - out but that haven't yet been fixed. The reasons for why they haven't been - fixed can involve anything really, but the primary reason is that nobody has - considered these problems to be important enough to spend the necessary time - and effort to have them fixed. - - The KNOWN_BUGS are always up for grabs and we will always love the ones who - bring one of them back to live and offers solutions to them. - - The KNOWN_BUGS document has a sibling document known as TODO. - -2.7 TODO - - Issues that are filed or reported that aren't really bugs but more missing - features or ideas for future improvements and so on are marked as - 'enhancement' or 'feature-request' and will be added to the TODO document - instead and the issue is closed. We don't keep TODO items in the issue - tracker. - - The TODO document is full of ideas and suggestions of what we can add or fix - one day. You're always encouraged and free to grab one of those items and - take up a discussion with the curl development team on how that could be - implemented or provided in the project so that you can work on ticking it - odd that document. - - If the issue is rather a bug and not a missing feature or functionality, it - is listed in KNOWN_BUGS instead. - -2.8 Closing off stalled bugs - - The issue and pull request trackers on https://github.com/curl/curl will - only hold "active" entries (using a non-precise definition of what active - actually is, but they're at least not completely dead). Those that are - abandoned or in other ways dormant will be closed and sometimes added to - TODO and KNOWN_BUGS instead. - - This way, we only have "active" issues open on github. Irrelevant issues and - pull requests will not distract developers or casual visitors. diff --git a/docs/BUGS.md b/docs/BUGS.md new file mode 100644 index 00000000000000..cc09a5d1b55c84 --- /dev/null +++ b/docs/BUGS.md @@ -0,0 +1,266 @@ +# BUGS + +## There are still bugs + + Curl and libcurl keep being developed. Adding features and changing code + means that bugs will sneak in, no matter how hard we try not to. + + Of course there are lots of bugs left. And lots of misfeatures. + + To help us make curl the stable and solid product we want it to be, we need + bug reports and bug fixes. + +## Where to report + + If you can't fix a bug yourself and submit a fix for it, try to report an as + detailed report as possible to a curl mailing list to allow one of us to have + a go at a solution. 
You can optionally also submit your problem in [curl's + bug tracking system](https://github.com/curl/curl/issues). + + Please read the rest of this document below first before doing that! + + If you feel you need to ask around first, find a suitable [mailing list]( + https://curl.haxx.se/mail/) and post your questions there. + +## Security bugs + + If you find a bug or problem in curl or libcurl that you think has a security + impact, for example a bug that can put users in danger or make them + vulnerable if the bug becomes public knowledge, then please report that bug + using our security development process. + + Security related bugs or bugs that are suspected to have a security impact, + should be reported on the [curl security tracker at + HackerOne](https://hackerone.com/curl). + + This ensures that the report reaches the curl security team so that they + first can be deal with the report away from the public to minimize the harm + and impact it will have on existing users out there who might be using the + vulnerable versions. + + The curl project's process for handling security related issues is + [documented separately](https://curl.haxx.se/dev/secprocess.html). + +## What to report + + When reporting a bug, you should include all information that will help us + understand what's wrong, what you expected to happen and how to repeat the + bad behavior. You therefore need to tell us: + + - your operating system's name and version number + + - what version of curl you're using (`curl -V` is fine) + + - versions of the used libraries that libcurl is built to use + + - what URL you were working with (if possible), at least which protocol + + and anything and everything else you think matters. Tell us what you expected + to happen, tell use what did happen, tell us how you could make it work + another way. Dig around, try out, test. Then include all the tiny bits and + pieces in your report. You will benefit from this yourself, as it will enable + us to help you quicker and more accurately. + + Since curl deals with networks, it often helps us if you include a protocol + debug dump with your bug report. The output you get by using the `-v` or + `--trace` options. + + If curl crashed, causing a core dump (in unix), there is hardly any use to + send that huge file to anyone of us. Unless we have an exact same system + setup as you, we can't do much with it. Instead we ask you to get a stack + trace and send that (much smaller) output to us instead! + + The address and how to subscribe to the mailing lists are detailed in the + `MANUAL.md` file. + +## libcurl problems + + When you've written your own application with libcurl to perform transfers, + it is even more important to be specific and detailed when reporting bugs. + + Tell us the libcurl version and your operating system. Tell us the name and + version of all relevant sub-components like for example the SSL library + you're using and what name resolving your libcurl uses. If you use SFTP or + SCP, the libssh2 version is relevant etc. + + Showing us a real source code example repeating your problem is the best way + to get our attention and it will greatly increase our chances to understand + your problem and to work on a fix (if we agree it truly is a problem). + + Lots of problems that appear to be libcurl problems are actually just abuses + of the libcurl API or other malfunctions in your applications. 
It is advised + that you run your problematic program using a memory debug tool like valgrind + or similar before you post memory-related or "crashing" problems to us. + +## Who will fix the problems + + If the problems or bugs you describe are considered to be bugs, we want to + have the problems fixed. + + There are no developers in the curl project that are paid to work on bugs. + All developers that take on reported bugs do this on a voluntary basis. We do + it out of an ambition to keep curl and libcurl excellent products and out of + pride. + + But please do not assume that you can just lump over something to us and it + will then magically be fixed after some given time. Most often we need + feedback and help to understand what you've experienced and how to repeat a + problem. Then we may only be able to assist YOU to debug the problem and to + track down the proper fix. + + We get reports from many people every month and each report can take a + considerable amount of time to really go to the bottom with. + +## How to get a stack trace + + First, you must make sure that you compile all sources with `-g` and that you + don't 'strip' the final executable. Try to avoid optimizing the code as well, + remove `-O`, `-O2` etc from the compiler options. + + Run the program until it cores. + + Run your debugger on the core file, like ` curl + core`. `` should be replaced with the name of your debugger, in + most cases that will be `gdb`, but `dbx` and others also occur. + + When the debugger has finished loading the core file and presents you a + prompt, enter `where` (without quotes) and press return. + + The list that is presented is the stack trace. If everything worked, it is + supposed to contain the chain of functions that were called when curl + crashed. Include the stack trace with your detailed bug report. It'll help a + lot. + +## Bugs in libcurl bindings + + There will of course pop up bugs in libcurl bindings. You should then + primarily approach the team that works on that particular binding and see + what you can do to help them fix the problem. + + If you suspect that the problem exists in the underlying libcurl, then please + convert your program over to plain C and follow the steps outlined above. + +## Bugs in old versions + + The curl project typically releases new versions every other month, and we + fix several hundred bugs per year. For a huge table of releases, number of + bug fixes and more, see: https://curl.haxx.se/docs/releases.html + + The developers in the curl project do not have bandwidth or energy enough to + maintain several branches or to spend much time on hunting down problems in + old versions when chances are we already fixed them or at least that they've + changed nature and appearance in later versions. + + When you experience a problem and want to report it, you really SHOULD + include the version number of the curl you're using when you experience the + issue. If that version number shows us that you're using an out-of-date curl, + you should also try out a modern curl version to see if the problem persists + or how/if it has changed in appearance. + + Even if you cannot immediately upgrade your application/system to run the + latest curl version, you can most often at least run a test version or + experimental build or similar, to get this confirmed or not. + + At times people insist that they cannot upgrade to a modern curl version, but + instead they "just want the bug fixed". 
That's fine, just don't count on us + spending many cycles on trying to identify which single commit, if that's + even possible, that at some point in the past fixed the problem you're now + experiencing. + + Security wise, it is almost always a bad idea to lag behind the current curl + versions by a lot. We keeping discovering and reporting security problems + over time see you can see in [this + table](https://curl.haxx.se/docs/vulnerabilities.html) + +# Bug fixing procedure + +## What happens on first filing + + When a new issue is posted in the issue tracker or on the mailing list, the + team of developers first need to see the report. Maybe they took the day off, + maybe they're off in the woods hunting. Have patience. Allow at least a few + days before expecting someone to have responded. + + In the issue tracker you can expect that some labels will be set on the issue + to help categorize it. + +## First response + + If your issue/bug report wasn't perfect at once (and few are), chances are + that someone will ask follow-up questions. Which version did you use? Which + options did you use? How often does the problem occur? How can we reproduce + this problem? Which protocols does it involve? Or perhaps much more specific + and deep diving questions. It all depends on your specific issue. + + You should then respond to these follow-up questions and provide more info + about the problem, so that we can help you figure it out. Or maybe you can + help us figure it out. An active back-and-forth communication is important + and the key for finding a cure and landing a fix. + +## Not reproducible + + For problems that we can't reproduce and can't understand even after having + gotten all the info we need and having studied the source code over again, + are really hard to solve so then we may require further work from you who + actually see or experience the problem. + +## Unresponsive + + If the problem haven't been understood or reproduced, and there's nobody + responding to follow-up questions or questions asking for clarifications or + for discussing possible ways to move forward with the task, we take that as a + strong suggestion that the bug is not important. + + Unimportant issues will be closed as inactive sooner or later as they can't + be fixed. The inactivity period (waiting for responses) should not be shorter + than two weeks but may extend months. + +## Lack of time/interest + + Bugs that are filed and are understood can unfortunately end up in the + "nobody cares enough about it to work on it" category. Such bugs are + perfectly valid problems that *should* get fixed but apparently aren't. We + try to mark such bugs as `KNOWN_BUGS material` after a time of inactivity and + if no activity is noticed after yet some time those bugs are added to + `KNOWN_BUGS` and are closed in the issue tracker. + +## `KNOWN_BUGS` + + This is a list of known bugs. Bugs we know exist and that have been pointed + out but that haven't yet been fixed. The reasons for why they haven't been + fixed can involve anything really, but the primary reason is that nobody has + considered these problems to be important enough to spend the necessary time + and effort to have them fixed. + + The `KNOWN_BUGS` are always up for grabs and we will always love the ones who + bring one of them back to live and offers solutions to them. + + The `KNOWN_BUGS` document has a sibling document known as `TODO`. 
+ +## `TODO` + + Issues that are filed or reported that aren't really bugs but more missing + features or ideas for future improvements and so on are marked as + 'enhancement' or 'feature-request' and will be added to the `TODO` document + instead and the issue is closed. We don't keep TODO items in the issue + tracker. + + The `TODO` document is full of ideas and suggestions of what we can add or + fix one day. You're always encouraged and free to grab one of those items and + take up a discussion with the curl development team on how that could be + implemented or provided in the project so that you can work on ticking it odd + that document. + + If the issue is rather a bug and not a missing feature or functionality, it + is listed in `KNOWN_BUGS` instead. + +## Closing off stalled bugs + + The [issue and pull request trackers](https://github.com/curl/curl) only + holds "active" entries open (using a non-precise definition of what active + actually is, but they're at least not completely dead). Those that are + abandoned or in other ways dormant will be closed and sometimes added to + `TODO` and `KNOWN_BUGS` instead. + + This way, we only have "active" issues open on github. Irrelevant issues and + pull requests will not distract developers or casual visitors. diff --git a/docs/CHECKSRC.md b/docs/CHECKSRC.md index 10e2f4d73751ef..d36763bc542dd6 100644 --- a/docs/CHECKSRC.md +++ b/docs/CHECKSRC.md @@ -9,7 +9,7 @@ check that it adheres to our [Source Code Style guide](CODE_STYLE.md). ## Command line options -`-W[file]` whitelists that file and excludes it from being checked. Helpful +`-W[file]` skip that file and excludes it from being checked. Helpful when, for example, one of the files is generated. `-D[dir]` directory name to prepend to file names when accessing them. @@ -70,6 +70,8 @@ warnings are: - `NOSPACEEQUALS`: An equals sign was found without preceding space. We prefer `a = 2` and *not* `a=2`. +- `ONELINECONDITION`: do not put the conditional block on the same line as `if()` + - `OPENCOMMENT`: File ended with a comment (`/*`) still "open". - `PARENBRACE`: `){` was used without sufficient space in between. @@ -98,7 +100,9 @@ warnings are: - `TABS`: TAB characters are not allowed! -- `TRAILINGSPACE`: Trailing white space on the line +- `TRAILINGSPACE`: Trailing whitespace on the line + +- `TYPEDEFSTRUCT`: we frown upon (most) typedefed structs - `UNUSEDIGNORE`: a checksrc inlined warning ignore was asked for but not used, that's an ignore that should be removed or changed to get used. @@ -158,5 +162,5 @@ instances are ignored and nothing extra. This is a method we've transitioned away from. Use inline ignores as far as possible. -Make a `checksrc.whitelist` file in the directory of the source code with the +Make a `checksrc.skip` file in the directory of the source code with the false positive, and include the full offending line into this file. diff --git a/docs/CIPHERS.md b/docs/CIPHERS.md index c011804267fcba..19aedf36fc30e4 100644 --- a/docs/CIPHERS.md +++ b/docs/CIPHERS.md @@ -6,11 +6,12 @@ and [`--ciphers`](https://curl.haxx.se/docs/manpage.html#--ciphers) users can control which ciphers to consider when negotiating TLS connections. -TLS 1.3 ciphers are supported since curl 7.61 with options +TLS 1.3 ciphers are supported since curl 7.61 for OpenSSL 1.1.1+ with options [`CURLOPT_TLS13_CIPHERS`](https://curl.haxx.se/libcurl/c/CURLOPT_TLS13_CIPHERS.html) and [`--tls13-ciphers`](https://curl.haxx.se/docs/manpage.html#--tls13-ciphers) -. +. 
If you are using a different SSL backend you can try setting TLS 1.3 cipher +suites by using the respective regular cipher option. The names of the known ciphers differ depending on which TLS backend that libcurl was built to use. This is an attempt to list known cipher names. @@ -269,9 +270,16 @@ When specifying multiple cipher names, separate them with colon (`:`). `ecdhe_ecdsa_chacha20_poly1305_sha_256` `dhe_rsa_chacha20_poly1305_sha_256` +### TLS 1.3 cipher suites + +`aes_128_gcm_sha_256` +`aes_256_gcm_sha_384` +`chacha20_poly1305_sha_256` + ## GSKit -Ciphers are internally defined as numeric codes (https://www.ibm.com/support/knowledgecenter/ssw_ibm_i_73/apis/gsk_attribute_set_buffer.htm), +Ciphers are internally defined as +[numeric codes](https://www.ibm.com/support/knowledgecenter/ssw_ibm_i_73/apis/gsk_attribute_set_buffer.htm), but libcurl maps them to the following case-insensitive names. ### SSL2 cipher suites (insecure: disabled by default) @@ -446,9 +454,18 @@ but libcurl maps them to the following case-insensitive names. `DHE-PSK-CHACHA20-POLY1305`, `EDH-RSA-DES-CBC3-SHA`, -## WinSSL +## Schannel + +Schannel allows the enabling and disabling of encryption algorithms, but not +specific ciphersuites. They are +[defined](https://docs.microsoft.com/windows/desktop/SecCrypto/alg-id) by +Microsoft. -WinSSL allows the enabling and disabling of encryption algorithms, but not specific ciphersuites. They are defined by Microsoft (https://msdn.microsoft.com/en-us/library/windows/desktop/aa375549(v=vs.85).aspx) +There is also the case that the selected algorithm is not supported by the +protocol or does not match the ciphers offered by the server during the SSL +negotiation. In this case curl will return error +`CURLE_SSL_CONNECT_ERROR (35) SEC_E_ALGORITHM_MISMATCH` +and the request will fail. `CALG_MD2`, `CALG_MD4`, diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt index 694861725b5569..22863bc0f2ec09 100644 --- a/docs/CMakeLists.txt +++ b/docs/CMakeLists.txt @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### #add_subdirectory(examples) add_subdirectory(libcurl) add_subdirectory(cmdline-opts) diff --git a/docs/CODE_REVIEW.md b/docs/CODE_REVIEW.md new file mode 100644 index 00000000000000..f55cb09850acf8 --- /dev/null +++ b/docs/CODE_REVIEW.md @@ -0,0 +1,168 @@ +# How to do code reviews for curl + +Anyone and everyone is encouraged and welcome to review code submissions in +curl. This is a guide on what to check for and how to perform a successful +code review. + +## All submissions should get reviewed + +All pull requests and patches submitted to the project should be reviewed by +at least one experienced curl maintainer before that code is accepted and +merged. 
+ +## Let the tools and tests take the first rounds + +On initial pull requests, let the tools and tests do their job first and then +start out by helping the submitter understand the test failures and tool +alerts. + +## How to provide feedback to author + +Be nice. Ask questions. Provide examples or suggestions of improvements. +Assume best intentions. Remember language barriers. + +All first-time contributors can become regulars. Let's help them go there. + +## Is this a change we want? + +If this is not a change that seems to be aligned with the project's path +forward and as such cannot be accepted, inform the author about this sooner +rather than later. Do it gently and explain why and possibly what could be +done to make it more acceptable. + +## API/ABI stability or changed behavior + +Changing the API and the ABI may be fine in a change but it needs to be done +deliberately and carefully. If not, a reviewer must help the author to realize +the mistake. + +curl and libcurl are similarly very strict on not modifying existing +behavior. API and ABI stability is not enough, the behavior should also remain +intact as far as possible. + +## Code style + +Most code style nits are detected by checksrc but not all. Only leave remarks +on style deviation once checksrc doesn't find any more. + +Minor nits from fresh submitters can also be handled by the maintainer when +merging, in case it seems like the submitter isn't clear on what to do. We +want to make the process fun and exciting for new contributors. + +## Encourage consistency + +Make sure new code is written in a similar style as existing code. Naming, +logic, conditions, etc. + +## Are pointers always non-NULL? + +If a function or code rely on pointers being non-NULL, take an extra look if +that seems to be a fair assessment. + +## Asserts + +Conditions that should never be false can be verified with `DEBUGASSERT()` +calls to get caught in tests and debugging easier, while not having an impact +on final or release builds. + +## Memory allocation + +Can the mallocs be avoided? Do not introduce mallocs in any hot paths. If +there are (new) mallocs, can they be combined into fewer calls? + +Are all allocations handled in errorpaths to avoid leaks and crashes? + +## Thread-safety + +We do not like static variables as they break thread-safety and prevent +functions from being reentrant. + +## Should features be `#ifdef`ed? + +Features and functionality may not be present everywhere and should therefore +be `#ifdef`ed. Additionally, some features should be possible to switch on/off +in the build. + +Write `#ifdef`s to be as little of a "maze" as possible. + +## Does it look portable enough? + +curl runs "everywhere". Does the code take a reasonable stance and enough +precautions to be possible to build and run on most platforms? + +Remember that we live by C89 restrictions. + +## Tests and testability + +New features should be added in conjunction with one or more test cases. +Ideally, functions should also be written so that unit tests can be done to +test individual functions. + +## Documentation + +New features or changes to existing functionality **must** be accompanied with +updated documentation. Submitting that in a separate follow-up pull request is +not OK. A code review must also verify that the submitted documentation update +matches the code submission. + +English isn't everyone's first language, be mindful of this and help the +submitter improve the text if it needs a rewrite to read better. 
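+
+## An illustrative example
+
+To make the earlier points about asserts, non-NULL pointers and dynbuf use
+concrete, this is the kind of defensive pattern reviewers look for. It is a
+contrived sketch only: `example_append` is an invented helper name, while
+`DEBUGASSERT()`, `struct dynbuf` and `Curl_dyn_add()` are the internal curl
+facilities referred to above and in the dynbuf documentation.
+
+    /* sketch of an internal helper, assuming curl's internal headers */
+    static CURLcode example_append(struct dynbuf *buf, const char *value)
+    {
+      /* 'value' must never be NULL here; catch violations in debug builds */
+      DEBUGASSERT(value);
+      if(!value)
+        return CURLE_BAD_FUNCTION_ARGUMENT;
+
+      /* let dynbuf handle growing the buffer and error reporting */
+      return Curl_dyn_add(buf, value);
+    }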
+ +## Code shouldn't be hard to understand + +Source code should be written to maximize readability and be easy to +understand. + +## Functions shouldn't be large + +A single function should never be large as that makes it hard to follow and +understand all the exit points and state changes. Some existing functions in +curl certainly violate this ground rule but when reviewing new code we should +propose splitting into smaller functions. + +## Duplication is evil + +Anything that looks like duplicated code is a red flag. Anything that seems to +introduce code that we *should* already have or provide needs a closer check. + +## Sensitive data + +When credentials are involved, take an extra look at what happens with this +data. Where it comes from and where it goes. + +## Variable types differ + +`size_t` is not a fixed size. `time_t` can be signed or unsigned and have +different sizes. Relying on variable sizes is a red flag. + +Also remember that endianness and >= 32 bit accesses to unaligned addresses +are problematic areas. + +## Integer overflows + +Be careful about integer overflows. Some variable types can be either 32 bit +or 64 bit. Integer overflows must be detected and acted on *before* they +happen. + +## Dangerous use of functions + +Maybe use of `realloc()` should rather use the dynbuf functions? + +Do not allow new code that grows buffers without using dynbuf. + +Use of C functions that rely on a terminating zero must only be used on data +that really do have a zero terminating zero. + +## Dangerous "data styles" + +Make extra precautions and verify that memory buffers that need a terminating +zero always have exactly that. Buffers *without* a zero terminator must not be +used as input to string functions. + +# Commit messages + +Tightly coupled with a code review is making sure that the commit message is +good. It is the responsibilitiy of the person who merges the code to make sure +that the commit message follows our standard (detailed in the +[CONTRIBUTE.md](CONTRIBUTE.md) document). This includes making sure the PR +identifies related issues and giving credit to reporters and helpers. diff --git a/docs/CODE_STYLE.md b/docs/CODE_STYLE.md index 2d275cd7d9db3e..5613437520476e 100644 --- a/docs/CODE_STYLE.md +++ b/docs/CODE_STYLE.md @@ -9,8 +9,8 @@ style is more important than individual contributors having their own personal tastes satisfied. Our C code has a few style rules. Most of them are verified and upheld by the -"lib/checksrc.pl" script. Invoked with "make checksrc" or even by default by -the build system when built after "./configure --enable-debug" has been used. +`lib/checksrc.pl` script. Invoked with `make checksrc` or even by default by +the build system when built after `./configure --enable-debug` has been used. It is normally not a problem for anyone to follow the guidelines, as you just need to copy the style already used in the source code and there are no @@ -227,7 +227,7 @@ Align with the "current open" parenthesis: Use **#ifdef HAVE_FEATURE** to do conditional code. We avoid checking for particular operating systems or hardware in the #ifdef lines. The HAVE_FEATURE shall be generated by the configure script for unix-like systems and they are -hard-coded in the config-[system].h files for the others. +hard-coded in the `config-[system].h` files for the others. 
We also encourage use of macros/functions that possibly are empty or defined to constants when libcurl is built without that feature, to make the code @@ -244,3 +244,22 @@ depending on a build-time conditional: #endif int content = magic(3); + +## No typedefed structs + +Use structs by all means, but do not typedef them. Use the `struct name` way +of identifying them: + + struct something { + void *valid; + size_t way_to_write; + }; + struct something instance; + +**Not okay**: + + typedef struct { + void *wrong; + size_t way_to_write; + } something; + something instance; diff --git a/docs/CONTRIBUTE.md b/docs/CONTRIBUTE.md index 978b87d27d9d12..69ddfbee0f4062 100644 --- a/docs/CONTRIBUTE.md +++ b/docs/CONTRIBUTE.md @@ -108,7 +108,7 @@ submit a small description of your fix or your new features with every contribution so that it can be swiftly added to the package documentation. The documentation is always made in man pages (nroff formatted) or plain -ASCII files. All HTML files on the web site and in the release archives are +ASCII files. All HTML files on the website and in the release archives are generated from the nroff/ASCII versions. ### Test Cases @@ -172,6 +172,33 @@ you are expected to fix the problem. If you don't understand when the issue is or have other problems to fix the complaint, just ask and other project members will likely be able to help out. +Consider the following table while looking at pull request failures: + + | CI platform as shown in PR | State | What to look at next | + | ----------------------------------- | ------ | -------------------------- | + | CI / codeql | stable | quality check results | + | CI / fuzzing | stable | fuzzing results | + | CI / macos ... | stable | all errors and failures | + | Code scanning results / CodeQL | stable | quality check results | + | FreeBSD FreeBSD: ... | stable | all errors and failures | + | LGTM analysis: Python | stable | new findings | + | LGTM analysis: C/C++ | stable | new findings | + | buildbot/curl_winssl_ ... | stable | all errors and failures | + | continuous-integration/appveyor/pr | stable | all errors and failures | + | continuous-integration/travis-ci/pr | stable | all errors and failures | + | curl.curl (linux ...) | stable | all errors and failures | + | curl.curl (windows ...) | flaky | repetitive errors/failures | + | deepcode-ci-bot | stable | new findings | + | musedev | stable | new findings | + +Sometimes the tests fail due to a dependency service temporarily being offline +or otherwise unavailable, eg. package downloads. In this case you can just +try to update your pull requests to rerun the tests later as described below. + +You can update your pull requests by pushing new commits or force-pushing +changes to existing commits. Force-pushing an amended commit without any +actual content changed also allows you to retrigger the tests for that commit. + When you adjust your pull requests after review, consider squashing the commits so that we can review the full updated version more easily. @@ -200,6 +227,16 @@ A short guide to how to write commit messages in the curl project. [whatever-else-by: credit all helpers, finders, doers] ---- stop ---- +The first line is a succinct description of the change: + + - use the imperative, present tense: "change" not "changed" nor "changes" + - don't capitalize first letter + - no dot (.) at the end + +The `[area]` in the first line can be `http2`, `cookies`, `openssl` or +similar. 
There's no fixed list to select from but using the same "area" as +other related changes could make sense. + Don't forget to use commit --author="" if you commit someone else's work, and make sure that you have your own user and email setup correctly in git before you commit @@ -265,3 +302,6 @@ For Windows: - [https://gnuwin32.sourceforge.io/packages/patch.htm](https://gnuwin32.sourceforge.io/packages/patch.htm) - [https://gnuwin32.sourceforge.io/packages/diffutils.htm](https://gnuwin32.sourceforge.io/packages/diffutils.htm) + +### Useful resources +* [Webinar on getting code into cURL](https://www.youtube.com/watch?v=QmZ3W1d6LQI) diff --git a/docs/CURL-DISABLE.md b/docs/CURL-DISABLE.md new file mode 100644 index 00000000000000..a9e6acaf8fcb78 --- /dev/null +++ b/docs/CURL-DISABLE.md @@ -0,0 +1,124 @@ +# Code defines to disable features and protocols + +## CURL_DISABLE_COOKIES + +Disable support for HTTP cookies. + +## CURL_DISABLE_CRYPTO_AUTH + +Disable support for authentication methods using crypto. + +## CURL_DISABLE_DICT + +Disable the DICT protocol + +## CURL_DISABLE_DOH + +Disable DNS-over-HTTPS + +## CURL_DISABLE_FILE + +Disable the FILE protocol + +## CURL_DISABLE_FTP + +Disable the FTP (and FTPS) protocol + +## CURL_DISABLE_GETOPTIONS + +Disable the `curl_easy_options` API calls that lets users get information +about existing options to `curl_easy_setopt`. + +## CURL_DISABLE_GOPHER + +Disable the GOPHER protocol. + +## CURL_DISABLE_HTTP + +Disable the HTTP(S) protocols. Note that this then also disable HTTP proxy +support. + +## CURL_DISABLE_HTTP_AUTH + +Disable support for all HTTP authentication methods. + +## CURL_DISABLE_IMAP + +Disable the IMAP(S) protocols. + +## CURL_DISABLE_LDAP + +Disable the LDAP(S) protocols. + +## CURL_DISABLE_LDAPS + +Disable the LDAPS protocol. + +## CURL_DISABLE_LIBCURL_OPTION + +Disable the --libcurl option from the curl tool. + +## CURL_DISABLE_MIME + +Disable MIME support. + +## CURL_DISABLE_MQTT + +Disable MQTT support. + +## CURL_DISABLE_NETRC + +Disable the netrc parser. + +## CURL_DISABLE_OPENSSL_AUTO_LOAD_CONFIG + +Disable the auto load config support in the OpenSSL backend. + +## CURL_DISABLE_PARSEDATE + +Disable date parsing + +## CURL_DISABLE_POP + +Disable the POP(S) protocols + +## CURL_DISABLE_PROGRESS_METER + +Disable the built-in progress meter + +## CURL_DISABLE_PROXY + +Disable support for proxies + +## CURL_DISABLE_RTSP + +Disable the RTSP protocol. + +## CURL_DISABLE_SHUFFLE_DNS + +Disable the shuffle DNS feature + +## CURL_DISABLE_SMB + +Disable the SMB(S) protocols + +## CURL_DISABLE_SMTP + +Disable the SMTP(S) protocols + +## CURL_DISABLE_SOCKETPAIR + +Disable the use of socketpair internally to allow waking up and canceling +curl_multi_poll(). + +## CURL_DISABLE_TELNET + +Disable the TELNET protocol + +## CURL_DISABLE_TFTP + +Disable the TFTP protocol + +## CURL_DISABLE_VERBOSE_STRINGS + +Disable verbose strings and error messages. diff --git a/docs/DEPRECATE.md b/docs/DEPRECATE.md index 4f2570ade94fc8..26877c48aa71bc 100644 --- a/docs/DEPRECATE.md +++ b/docs/DEPRECATE.md @@ -5,17 +5,8 @@ email the curl-library mailing list as soon as possible and explain to us why this is a problem for you and how your use case can't be satisfied properly using a work around. -## HTTP/0.9 +## Past removals -Supporting this is non-obvious and might even come as a surprise to some -users. Potentially even being a security risk in some cases. - -### State - -curl 7.64.0 introduces options to disable/enable support for this protocol -version. 
The default remains supported for now. - -### Removal - -The support for HTTP/0.9 will be switched to disabled by default in 6 months, -in the September 2019 release (possibly called curl 7.68.0). + - Pipelining + - axTLS + - PolarSSL diff --git a/docs/DYNBUF.md b/docs/DYNBUF.md new file mode 100644 index 00000000000000..aa528ec3ae33d1 --- /dev/null +++ b/docs/DYNBUF.md @@ -0,0 +1,86 @@ +# dynbuf + +This is the internal module for creating and handling "dynamic buffers". This +means buffers that can be appended to, dynamically and grow in size to adapt. + +There will always be a terminating zero put at the end of the dynamic buffer. + +The `struct dynbuf` is used to hold data for each instance of a dynamic +buffer. The members of that struct **MUST NOT** be accessed or modified +without using the dedicated dynbuf API. + +## init + + void Curl_dyn_init(struct dynbuf *s, size_t toobig); + +This inits a struct to use for dynbuf and it can't fail. The `toobig` value +**must** be set to the maximum size we allow this buffer instance to grow to. +The functions below will return `CURLE_OUT_OF_MEMORY` when hitting this limit. + +## free + + void Curl_dyn_free(struct dynbuf *s); + +Free the associated memory and clean up. After a free, the `dynbuf` struct can +be re-used to start appending new data to. + +## addn + + CURLcode Curl_dyn_addn(struct dynbuf *s, const void *mem, size_t len); + +Append arbitrary data of a given length to the end of the buffer. + +## add + + CURLcode Curl_dyn_add(struct dynbuf *s, const char *str); + +Append a C string to the end of the buffer. + +## addf + + CURLcode Curl_dyn_addf(struct dynbuf *s, const char *fmt, ...); + +Append a `printf()`-style string to the end of the buffer. + +## vaddf + + CURLcode Curl_dyn_vaddf(struct dynbuf *s, const char *fmt, va_list ap); + +Append a `vprintf()`-style string to the end of the buffer. + +## reset + + void Curl_dyn_reset(struct dynbuf *s); + +Reset the buffer length, but leave the allocation. + +## tail + + CURLcode Curl_dyn_tail(struct dynbuf *s, size_t length) + +Keep `length` bytes of the buffer tail (the last `length` bytes of the +buffer). The rest of the buffer is dropped. The specified `length` must not be +larger than the buffer length. + +## ptr + + char *Curl_dyn_ptr(const struct dynbuf *s); + +Returns a `char *` to the buffer if it has a length, otherwise a NULL. Since +the buffer may be reallocated, this pointer should not be trusted or used +anymore after the next buffer manipulation call. + +## uptr + + unsigned char *Curl_dyn_uptr(const struct dynbuf *s); + +Returns an `unsigned char *` to the buffer if it has a length, otherwise a +NULL. Since the buffer may be reallocated, this pointer should not be trusted +or used anymore after the next buffer manipulation call. + +## len + + size_t Curl_dyn_len(const struct dynbuf *s); + +Returns the length of the buffer in bytes. Does not include the terminating +zero byte. diff --git a/docs/ECH.md b/docs/ECH.md new file mode 100644 index 00000000000000..ea1efaa6789275 --- /dev/null +++ b/docs/ECH.md @@ -0,0 +1,135 @@ +# TLS: ECH support in curl and libcurl + +## Summary + +**ECH** means **Encrypted Client Hello**, a TLS 1.3 extension which is +currently the subject of an [IETF Draft][tlsesni]. (ECH was formerly known as +ESNI). + +This file is intended to show the latest current state of ECH support +in **curl** and **libcurl**. 
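+
+As a quick way to check whether a given build carries the framework described
+below, an application can probe the `CURL_VERSION_ECH` feature bit at run
+time. This is an illustrative sketch only: the bit is present only in builds
+that include the change described under PR 4011, so the check is guarded with
+a preprocessor conditional.
+
+    #include <stdio.h>
+    #include <curl/curl.h>
+
+    int main(void)
+    {
+      curl_version_info_data *info = curl_version_info(CURLVERSION_NOW);
+      int ech = 0;
+    #ifdef CURL_VERSION_ECH
+      /* feature bit reserved by the ECH support framework */
+      ech = (info->features & CURL_VERSION_ECH) != 0;
+    #else
+      (void)info;
+    #endif
+      printf("ECH support: %s\n", ech ? "yes" : "no");
+      return ech ? 0 : 1;
+    }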
+ +At end of August 2019, an [experimental fork of curl][niallorcurl], built +using an [experimental fork of OpenSSL][sftcdopenssl], which in turn provided +an implementation of ECH, was demonstrated interoperating with a server +belonging to the [DEfO Project][defoproj]. + +Further sections here describe + +- resources needed for building and demonstrating **curl** support + for ECH, + +- progress to date, + +- TODO items, and + +- additional details of specific stages of the progress. + +## Resources needed + +To build and demonstrate ECH support in **curl** and/or **libcurl**, +you will need + +- a TLS library, supported by **libcurl**, which implements ECH; + +- an edition of **curl** and/or **libcurl** which supports the ECH + implementation of the chosen TLS library; + +- an environment for building and running **curl**, and at least + building **OpenSSL**; + +- a server, supporting ECH, against which to run a demonstration + and perhaps a specific target URL; + +- some instructions. + +The following set of resources is currently known to be available. + +| Set | Component | Location | Remarks | +|:-----|:-------------|:------------------------------|:-------------------------------------------| +| DEfO | TLS library | [sftcd/openssl][sftcdopenssl] | Tag *esni-2019-08-30* avoids bleeding edge | +| | curl fork | [niallor/curl][niallorcurl] | Tag *esni-2019-08-30* likewise | +| | instructions | [ESNI-README][niallorreadme] | | + +## Progress + +### PR 4011 (Jun 2019) expected in curl release 7.67.0 (Oct 2019) + +- Details [below](#pr4011); + +- New configuration option: `--enable-ech`; + +- Build-time check for availability of resources needed for ECH + support; + +- Pre-processor symbol `USE_ECH` for conditional compilation of + ECH support code, subject to configuration option and + availability of needed resources. + +## TODO + +- (next PR) Add libcurl options to set ECH parameters. + +- (next PR) Add curl tool command line options to set ECH parameters. + +- (WIP) Extend DoH functions so that published ECH parameters can be + retrieved from DNS instead of being required as options. + +- (WIP) Work with OpenSSL community to finalize ECH API. + +- Track OpenSSL ECH API in libcurl + +- Identify and implement any changes needed for CMake. + +- Optimize build-time checking of available resources. + +- Encourage ECH support work on other TLS/SSL backends. + +## Additional detail + +### PR 4011 + +**TLS: Provide ECH support framework for curl and libcurl** + +The proposed change provides a framework to facilitate work to implement ECH +support in curl and libcurl. It is not intended either to provide ECH +functionality or to favour any particular TLS-providing backend. Specifically, +the change reserves a feature bit for ECH support (symbol +`CURL_VERSION_ECH`), implements setting and reporting of this bit, includes +dummy book-keeping for the symbol, adds a build-time configuration option +(`--enable-ech`), provides an extensible check for resources available to +provide ECH support, and defines a compiler pre-processor symbol (`USE_ECH`) +accordingly. + +Proposed-by: @niallor (Niall O'Reilly)\ +Encouraged-by: @sftcd (Stephen Farrell)\ +See-also: [this message](https://curl.haxx.se/mail/lib-2019-05/0108.html) + +Limitations: +- Book-keeping (symbols-in-versions) needs real release number, not 'DUMMY'. + +- Framework is incomplete, as it covers autoconf, but not CMake. 
+ +- Check for available resources, although extensible, refers only to + specific work in progress ([described + here](https://github.com/sftcd/openssl/tree/master/esnistuff)) to + implement ECH for OpenSSL, as this is the immediate motivation + for the proposed change. + +## References + +Cloudflare blog: [Encrypting SNI: Fixing One of the Core Internet Bugs][corebug] + +Cloudflare blog: [Encrypt it or lose it: how encrypted SNI works][esniworks] + +IETF Draft: [Encrypted Server Name Indication for TLS 1.3][tlsesni] + +--- + +[tlsesni]: https://datatracker.ietf.org/doc/draft-ietf-tls-esni/ +[esniworks]: https://blog.cloudflare.com/encrypted-sni/ +[corebug]: https://blog.cloudflare.com/esni/ +[defoproj]: https://defo.ie/ +[sftcdopenssl]: https://github.com/sftcd/openssl/ +[niallorcurl]: https://github.com/niallor/curl/ +[niallorreadme]: https://github.com/niallor/curl/blob/master/ESNI-README.md diff --git a/docs/EXPERIMENTAL.md b/docs/EXPERIMENTAL.md new file mode 100644 index 00000000000000..6a014c3da7be1f --- /dev/null +++ b/docs/EXPERIMENTAL.md @@ -0,0 +1,23 @@ +# Experimental + +Some features and functionality in curl and libcurl are considered +**EXPERIMENTAL**. + +Experimental support in curl means: + +1. Experimental features are provided to allow users to try them out and + provide feedback on functionality and API etc before they ship and get + "carved in stone". +2. You must enable the feature when invoking configure as otherwise curl will + not be built with the feature present. +3. We strongly advice against using this feature in production. +4. **We reserve the right to change behavior** of the feature without sticking + to our API/ABI rules as we do for regular features, as long as it is marked + experimental. +5. Experimental features are clearly marked so in documentation. Beware. + +## Experimental features right now + + - HTTP/3 support and options + - alt-svc support and options + - CURLSSLOPT_NATIVE_CA (No configure option, feature built in when supported) diff --git a/docs/FAQ b/docs/FAQ index c1bc9bea53d071..b5efb1192a5d28 100644 --- a/docs/FAQ +++ b/docs/FAQ @@ -43,8 +43,8 @@ FAQ 3.9 How do I use curl in my favorite programming language? 3.10 What about SOAP, WebDAV, XML-RPC or similar protocols over HTTP? 3.11 How do I POST with a different Content-Type? - 3.12 Why do FTP specific features over HTTP proxy fail? - 3.13 Why does my single/double quotes fail? + 3.12 Why do FTP-specific features over HTTP proxy fail? + 3.13 Why do my single/double quotes fail? 3.14 Does curl support Javascript or PAC (automated proxy config)? 3.15 Can I do recursive fetches with curl? 3.16 What certificates do I need when I use SSL? @@ -72,7 +72,7 @@ FAQ 4.8 I found a bug! 4.9 Curl can't authenticate to the server that requires NTLM? 4.10 My HTTP request using HEAD, PUT or DELETE doesn't work! - 4.11 Why does my HTTP range requests return the full document? + 4.11 Why do my HTTP range requests return the full document? 4.12 Why do I get "certificate verify failed" ? 4.13 Why is curl -R on Windows one hour off? 4.14 Redirects work in browser but not with curl! @@ -136,8 +136,9 @@ FAQ libcurl A free and easy-to-use client-side URL transfer library, supporting DICT, - FILE, FTP, FTPS, GOPHER, HTTP, HTTPS, IMAP, IMAPS, LDAP, LDAPS, POP3, - POP3S, RTMP, RTSP, SCP, SFTP, SMB, SMBS, SMTP, SMTPS, TELNET and TFTP. + FILE, FTP, FTPS, GOPHER, HTTP, HTTPS, IMAP, IMAPS, LDAP, LDAPS, MQTT, + POP3, POP3S, RTMP, RTMPS, RTSP, SCP, SFTP, SMB, SMBS, SMTP, SMTPS, TELNET + and TFTP. 
libcurl supports HTTPS certificates, HTTP POST, HTTP PUT, FTP uploading, Kerberos, SPNEGO, HTTP form based upload, proxies, cookies, user+password @@ -188,7 +189,7 @@ FAQ curl's development, have we intended curl to replace wget or compete on its market. Curl is targeted at single-shot file transfers. - Curl is not a web site mirroring program. If you want to use curl to mirror + Curl is not a website mirroring program. If you want to use curl to mirror something: fine, go ahead and write a script that wraps around curl to make it reality (like curlmirror.pl does). @@ -246,21 +247,22 @@ FAQ 1.6 What do you get for making curl? - Project cURL is entirely free and open. No person gets paid for developing - curl full time. We do this voluntarily, mostly in our spare time. - Occasionally companies pay individual developers to work on curl, but that's - up to each company and developer. This is not controlled by nor supervised in - any way by the project. + Project cURL is entirely free and open. We do this voluntarily, mostly in + our spare time. Companies may pay individual developers to work on curl, + but that's up to each company and developer. This is not controlled by nor + supervised in any way by the curl project. - We still get help from companies. Haxx provides web site, bandwidth, mailing - lists etc, GitHub hosts the primary git repository and other services like - the bug tracker at https://github.com/curl/curl. Also again, some companies - have sponsored certain parts of the development in the past and I hope some - will continue to do so in the future. + We get help from companies. Haxx provides website, bandwidth, mailing lists + etc, GitHub hosts the primary git repository and other services like the bug + tracker at https://github.com/curl/curl. Also again, some companies have + sponsored certain parts of the development in the past and I hope some will + continue to do so in the future. If you want to support our project, consider a donation or a banner-program or even better: by helping us with coding, documenting or testing etc. + See also: https://curl.haxx.se/sponsors.html + 1.7 What about CURL from curl.com? During the summer of 2001, curl.com was busy advertising their client-side @@ -301,7 +303,7 @@ FAQ curl is fully open source. It means you can hire any skilled engineer to fix your curl-related problems. - We list available alternatives on the curl web site: + We list available alternatives on the curl website: https://curl.haxx.se/support.html 1.10 How many are using curl? @@ -316,21 +318,14 @@ FAQ We don't know how many users that downloaded or installed curl and then never use it. - In May 2012 Daniel did a counting game and came up with a number that may - be completely wrong or somewhat accurate. Over 500 million! - - See https://daniel.haxx.se/blog/2012/05/16/300m-users/ + In 2020, we estimate that curl runs in rougly ten billion installations + world wide. 1.11 Why don't you update ca-bundle.crt - The ca cert bundle that used to be shipped with curl was very outdated and - must be replaced with an up-to-date version by anyone who wants to verify - peers. It is no longer provided by curl. The last curl release that ever - shipped a ca cert bundle was curl 7.18.0. 
- In the cURL project we've decided not to attempt to keep this file updated - (or even present anymore) since deciding what to add to a ca cert bundle is - an undertaking we've not been ready to accept, and the one we can get from + (or even present) since deciding what to add to a ca cert bundle is an + undertaking we've not been ready to accept, and the one we can get from Mozilla is perfectly fine so there's no need to duplicate that work. Today, with many services performed over HTTPS, every operating system @@ -373,16 +368,13 @@ FAQ 1.14 How do I submit my patch? - When you have made a patch or a change of whatever sort, and want to submit - that to the project, there are a few different ways we prefer: - - o send a patch to the curl-library mailing list. We're many subscribers - there and there are lots of people who can review patches, comment on them - and "receive" them properly. + We strongly encourage you to submit changes and improvements directly as + "pull requests" on github: https://github.com/curl/curl/pulls - o if your patch changes or fixes a bug, you can also opt to submit a bug - report in the bug tracker and attach your patch there. There are less - people involved there. + If you for any reason can't or won't deal with github, send your patch to + the curl-library mailing list. We're many subscribers there and there are + lots of people who can review patches, comment on them and "receive" them + properly. Lots of more details are found in the CONTRIBUTE and INTERNALS docs. @@ -447,9 +439,9 @@ FAQ curl can be built to use one of the following SSL alternatives: OpenSSL, libressl, BoringSSL, GnuTLS, wolfSSL, NSS, mbedTLS, MesaLink, Secure - Transport (native iOS/OS X), Schannel (native Windows) or GSKit (native IBM - i). They all have their pros and cons, and we try to maintain a comparison - of them here: https://curl.haxx.se/docs/ssl-compared.html + Transport (native iOS/OS X), Schannel (native Windows), GSKit (native IBM + i), or BearSSL. They all have their pros and cons, and we try to maintain a + comparison of them here: https://curl.haxx.se/docs/ssl-compared.html 2.3 Where can I find a copy of LIBEAY32.DLL? @@ -457,7 +449,7 @@ FAQ Curl can be built with OpenSSL to do the SSL stuff. The LIBEAY32.DLL is then what curl needs on a windows machine to do https:// etc. Check out the curl - web site to find accurate and up-to-date pointers to recent OpenSSL DLLs and + website to find accurate and up-to-date pointers to recent OpenSSL DLLs and other binary packages. 2.4 Does curl support SOCKS (RFC 1928) ? @@ -557,13 +549,12 @@ FAQ 3.9 How do I use curl in my favorite programming language? - There exist many language interfaces/bindings for curl that integrates it - better with various languages. If you are fluid in a script language, you - may very well opt to use such an interface instead of using the command line - tool. + Many programming languages have interfaces/bindings that allow you to use + curl without having to use the command line tool. If you are fluent in such + a language, you may prefer to use one of these interfaces instead. 
Find out more about which languages that support curl directly, and how to - install and use them, in the libcurl section of the curl web site: + install and use them, in the libcurl section of the curl website: https://curl.haxx.se/libcurl/ All the various bindings to libcurl are made by other projects and people, @@ -598,11 +589,11 @@ FAQ curl -d "datatopost" -H "Content-Type: text/xml" [URL] - 3.12 Why do FTP specific features over HTTP proxy fail? + 3.12 Why do FTP-specific features over HTTP proxy fail? Because when you use a HTTP proxy, the protocol spoken on the network will be HTTP, even if you specify a FTP URL. This effectively means that you - normally can't use FTP specific features such as FTP upload and FTP quote + normally can't use FTP-specific features such as FTP upload and FTP quote etc. There is one exception to this rule, and that is if you can "tunnel through" @@ -610,7 +601,7 @@ FAQ and is generally not available as proxy admins usually disable tunneling to ports other than 443 (which is used for HTTPS access through proxies). - 3.13 Why does my single/double quotes fail? + 3.13 Why do my single/double quotes fail? To specify a command line option that includes spaces, you might need to put the entire option within quotes. Like in: @@ -724,7 +715,7 @@ FAQ 3.19 How do I get HTTP from a host using a specific IP address? - For example, you may be trying out a web site installation that isn't yet in + For example, you may be trying out a website installation that isn't yet in the DNS. Or you have a site using multiple IP addresses for a given host name and you want to address a specific one out of the set. @@ -895,7 +886,7 @@ FAQ

Moved Permanently

The document has moved here. - it might be because you request a directory URL but without the trailing + it might be because you requested a directory URL but without the trailing slash. Try the same operation again _with_ the trailing URL, or use the -L/--location option to follow the redirection. @@ -926,8 +917,8 @@ FAQ anyone would call security. Also note that regular HTTP (using Basic authentication) and FTP passwords - are sent in clear across the network. All it takes for anyone to fetch them - is to listen on the network. Eavesdropping is very easy. Use more secure + are sent as cleartext across the network. All it takes for anyone to fetch + them is to listen on the network. Eavesdropping is very easy. Use more secure authentication methods (like Digest, Negotiate or even NTLM) or consider the SSL-based alternatives HTTPS and FTPS. @@ -962,7 +953,7 @@ FAQ software you're trying to interact with. This is not anything curl can do anything about. - 4.11 Why does my HTTP range requests return the full document? + 4.11 Why do my HTTP range requests return the full document? Because the range may not be supported by the server, or the server may choose to ignore it and return the full document anyway. @@ -1012,8 +1003,8 @@ FAQ redirects the browser to another given URL. There is no way to make curl follow these redirects. You must either - manually figure out what the page is set to do, or you write a script that - parses the results and fetches the new URL. + manually figure out what the page is set to do, or write a script that parses + the results and fetches the new URL. 4.15 FTPS doesn't work diff --git a/docs/FEATURES b/docs/FEATURES index 68d38fc27fcff9..35b26e1b2ccd0c 100644 --- a/docs/FEATURES +++ b/docs/FEATURES @@ -180,6 +180,9 @@ IMAPS (*1) - explicit "STARTTLS" usage to "upgrade" plain imap:// connections to use SSL - via http-proxy +MQTT + - Subscribe to and publish topics using url scheme mqtt://broker/topic + FOOTNOTES ========= @@ -191,7 +194,7 @@ FOOTNOTES currently supported *5 = requires nghttp2 and possibly a recent TLS library *6 = requires c-ares - *7 = requires OpenSSL, NSS, GSKit, WinSSL or Secure Transport; GnuTLS, for + *7 = requires OpenSSL, NSS, GSKit, Schannel or Secure Transport; GnuTLS, for example, only supports SSLv3 and TLSv1 *8 = requires libssh2 *9 = requires OpenSSL, GnuTLS, mbedTLS, NSS, yassl, Secure Transport or SSPI diff --git a/docs/GOVERNANCE.md b/docs/GOVERNANCE.md index d49358b95e2502..a4c006fbef9177 100644 --- a/docs/GOVERNANCE.md +++ b/docs/GOVERNANCE.md @@ -18,7 +18,8 @@ expects from us. There is no legal entity. The curl project is just a bunch of people scattered around the globe with the common goal to produce source code that creates -great products. +great products. We are not part of any umbrella organization and we are not +located in any specific country. We are totally independent. The copyrights in the project are owned by the individuals and organizations that wrote those parts of the code. @@ -36,6 +37,28 @@ If there is no obvious consensus, a maintainer who's knowledgeable in the specific area will take an "executive" decision that they think is the right for the project. +## Donations + +Donating plain money to curl is best done to curl's [Open Collective +fund](https://opencollective.com/curl). Open Collective is a US based +non-profit organization that holds on to funds for us. This fund is then used +for paying the curl security bug bounties, to reimburse project related +expenses etc. 
+ +Donations to the project can also come in form of server hosting, providing +services and paying for people to work on curl related code etc. Usually, such +donations are services paid for directly by the sponsors. + +We grade sponsors in a few different levels and if they meet the criteria, +they can be mentioned on the Sponsors page on the curl website. + +## Commercial Support + +The curl project does not do or offer commercial support. It only hosts +mailing lists, runs bug trackers etc to facilitate communication and work. + +However, Daniel works for wolfSSL and we offer commercial curl support there. + ## Key roles ### Maintainers @@ -80,7 +103,7 @@ Feltzing serve as backup admins for when Daniel is gone or unable. The primary server is paid for by Haxx. The machine is physically located in a server bunker in Stockholm Sweden, operated by the company Portlane. -The web site contents are served to the web via Fastly and Daniel is the +The website contents are served to the web via Fastly and Daniel is the primary curl contact with Fastly. ### BDFL @@ -108,8 +131,8 @@ within the area of personal expertise and experience. ### Recommendations -- please enable 2fa on your github account to reduce risk of malicious source - code tampering +- we require two-factor authentication enabled on your github account to + reduce risk of malicious source code tampering - consider enabling signed git commits for additional verification of changes ### Merge advice diff --git a/docs/HELP-US.md b/docs/HELP-US.md index aae2b9f5990803..54744346a19475 100644 --- a/docs/HELP-US.md +++ b/docs/HELP-US.md @@ -15,6 +15,23 @@ found yourself or perhaps got annoyed at in the past. It can be a spelling error in an error text or a weirdly phrased section in a man page. Hunt it down and report the bug. Or make your first pull request with a fix for that. +## Smaller tasks + +Some projects mark small issues as "beginner friendly", "bite-sized" or +similar. We don't do that in curl since such issues never linger around long +enough. Simple issues get handled very fast. + +If you're looking for a smaller or simpler task in the project to help out +with as an entry-point into the project, perhaps because you are a newcomer or +even maybe not a terribly experienced developer, here's our advice: + + - Read through this document to get a grasp on a general approach to use + - Consider adding a test case for something not currentled tested (correctly) + - Consider updating or adding documentation + - One way to get your feet wet gently in the project, is to participate in an + existing issue/PR and help out by reproducing the issue, review the code in + the PR etc. + ## Help wanted In the issue tracker we occasionally mark bugs with [help diff --git a/docs/HISTORY.md b/docs/HISTORY.md index 30249071adc5c0..c5ed13846957ba 100644 --- a/docs/HISTORY.md +++ b/docs/HISTORY.md @@ -7,9 +7,12 @@ currency-exchange calculations available to Internet Relay Chat (IRC) users. All the necessary data were published on the Web; he just needed to automate their retrieval. -Daniel simply adopted an existing command-line open-source tool, httpget, that -Brazilian Rafael Sagula had written and recently released version 0.1 of. After -a few minor adjustments, it did just what he needed. +1996 +---- + +Daniel adopted an existing command-line open-source tool, httpget, that +Brazilian Rafael Sagula had written and recently released version 0.1 +of. After a few minor adjustments, it did just what he needed. 
1997 ---- @@ -58,7 +61,7 @@ OpenSSL took over and SSLeay was abandoned. May: first Debian package. -August: LDAP:// and FILE:// support added. The curl web site gets 1300 visits +August: LDAP:// and FILE:// support added. The curl website gets 1300 visits weekly. Moved site to curl.haxx.nu. September: Released curl 6.0. 15000 lines of code. @@ -77,7 +80,7 @@ other software and programs to be based on and powered by libcurl. Almost June: the curl site moves to "curl.haxx.se" -August, the curl web site gets 4000 visits weekly. +August, the curl website gets 4000 visits weekly. The PHP guys adopted libcurl already the same month, when the first ever third party libcurl binding showed up. CURL has been a supported module in PHP since @@ -106,7 +109,7 @@ The first experimental ftps:// support was added. August: curl is bundled in Mac OS X, 10.1. It was already becoming more and more of a standard utility of Linux distributions and a regular in the BSD -ports collections. The curl web site gets 8000 visits weekly. Curl Corporation +ports collections. The curl website gets 8000 visits weekly. Curl Corporation contacted Daniel to discuss "the name issue". After Daniel's reply, they have never since got back in touch again. @@ -117,7 +120,7 @@ without many whistles. 2002 ---- -June: the curl web site gets 13000 visits weekly. curl and libcurl is +June: the curl website gets 13000 visits weekly. curl and libcurl is 35000 lines of code. Reported successful compiles on more than 40 combinations of CPUs and operating systems. @@ -126,9 +129,11 @@ impossible. Around 5000 downloaded packages each week from the main site gives a hint, but the packages are mirrored extensively, bundled with numerous OS distributions and otherwise retrieved as part of other software. -September: with the release of curl 7.10 it is released under the MIT license +October 1: with the release of curl 7.10 it is released under the MIT license only. +Starting with 7.10, curl verifies SSL server certificates by default. + 2003 ---- @@ -162,7 +167,7 @@ August: Curl and libcurl 7.12.1 Available command line options: 96 Available curl_easy_setopt() options: 120 Number of public functions in libcurl: 36 - Amount of public web site mirrors: 12 + Amount of public website mirrors: 12 Number of known libcurl bindings: 26 2005 @@ -175,7 +180,7 @@ April: Added the multi_socket() API September: TFTP support was added. -More than 100,000 unique visitors of the curl web site. 25 mirrors. +More than 100,000 unique visitors of the curl website. 25 mirrors. 
December: security vulnerability: libcurl URL Buffer Overflow @@ -218,6 +223,8 @@ November: March: security vulnerability: libcurl Arbitrary File Access +April: added CMake support + August: security vulnerability: libcurl embedded zero in cert name December: Added support for IMAP, POP3 and SMTP @@ -281,7 +288,9 @@ April: added the cyassl backend (later renamed to WolfSSL) March: first real release supporting HTTP/2 - September: Web site had 245,000 unique visitors and served 236GB data + September: Website had 245,000 unique visitors and served 236GB data + + SMB and SMBS support 2015 ---- @@ -304,9 +313,11 @@ April: added the cyassl backend (later renamed to WolfSSL) 2017 ---- + July: OSS-Fuzz started fuzzing libcurl + September: Added Multi-SSL support - The web site serves 3100 GB/month + The website serves 3100 GB/month Public curl releases: 169 Command line options: 211 @@ -316,6 +327,8 @@ April: added the cyassl backend (later renamed to WolfSSL) October: SSLKEYLOGFILE support, new MIME API + October: Daniel received the Polhem Prize for his work on curl + November: brotli 2018 @@ -344,3 +357,28 @@ April: added the cyassl backend (later renamed to WolfSSL) curl_easy_setopt() options: 261 Public functions in libcurl: 80 Contributors: 1808 + + December: removed axTLS support + +2019 +---- + + March: added experimental alt-svc support + + August: the first HTTP/3 requests with curl. + + September: 7.66.0 is released and the tool offers parallel downloads + +2020 +---- + + curl and libcurl are installed in an estimated 10 *billion* instances + world-wide. + + January: added BearSSL support + + March: removed support for PolarSSL, added wolfSSH support + + April: experimental MQTT support + + August: zstd support diff --git a/docs/HTTP-COOKIES.md b/docs/HTTP-COOKIES.md index 632cb4ebe1817e..de9d1de682eedb 100644 --- a/docs/HTTP-COOKIES.md +++ b/docs/HTTP-COOKIES.md @@ -24,7 +24,7 @@ and in 2017, another update was [drafted](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-alone-01) to deprecate modification of 'secure' cookies from non-secure origins. Both - of these drafs have been incorporated into a proposal to + of these drafts have been incorporated into a proposal to [replace](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-02) RFC6265. Cookie prefixes and secure cookie modification protection has been implemented by curl. @@ -43,6 +43,27 @@ When libcurl saves a cookiejar, it creates a file header of its own in which there is a URL mention that will link to the web version of this document. +## Cookie file format + + The cookie file format is text based and stores one cookie per line. Lines + that start with `#` are treated as comments. + + Each line that each specifies a single cookie consists of seven text fields + separated with TAB characters. A valid line must end with a newline + character. + +### Fields in the file + + Field number, what type and example data and the meaning of it: + + 0. string `example.com` - the domain name + 1. boolean `FALSE` - include subdomains + 2. string `/foobar/` - path + 3. boolean `TRUE` - send/receive over HTTPS only + 4. number `1462299217` - expires at - seconds since Jan 1st 1970, or 0 + 5. string `person` - name of the cookie + 6. string `daniel` - value of the cookie + ## Cookies with curl the command line tool curl has a full cookie "engine" built in. 
If you just activate it, you can @@ -108,6 +129,6 @@ Since curl and libcurl are plain HTTP clients without any knowledge of or capability to handle javascript, such cookies will not be detected or used. - Often, if you want to mimic what a browser does on such web sites, you can + Often, if you want to mimic what a browser does on such websites, you can record web browser HTTP traffic when using such a site and then repeat the cookie operations using curl or libcurl. diff --git a/docs/HTTP2.md b/docs/HTTP2.md index 5b4435f8889f3c..4c72a29b210ec0 100644 --- a/docs/HTTP2.md +++ b/docs/HTTP2.md @@ -18,7 +18,7 @@ parts. The reason for this is that HTTP/2 is much more complex at that layer than HTTP/1.1 (which we implement on our own) and that nghttp2 is an already existing and well functional library. -We require at least version 1.0.0. +We require at least version 1.12.0. Over an http:// URL ------------------- diff --git a/docs/HTTP3.md b/docs/HTTP3.md new file mode 100644 index 00000000000000..fa9d24437f5669 --- /dev/null +++ b/docs/HTTP3.md @@ -0,0 +1,142 @@ +# HTTP3 (and QUIC) + +## Resources + +[HTTP/3 Explained](https://daniel.haxx.se/http3-explained/) - the online free +book describing the protocols involved. + +[QUIC implementation](https://github.com/curl/curl/wiki/QUIC-implementation) - +the wiki page describing the plan for how to support QUIC and HTTP/3 in curl +and libcurl. + +[quicwg.org](https://quicwg.org/) - home of the official protocol drafts + +## QUIC libraries + +QUIC libraries we're experimenting with: + +[ngtcp2](https://github.com/ngtcp2/ngtcp2) + +[quiche](https://github.com/cloudflare/quiche) + +## Experimental! + +HTTP/3 and QUIC support in curl is considered **EXPERIMENTAL** until further +notice. It needs to be enabled at build-time. + +Further development and tweaking of the HTTP/3 support in curl will happen in +in the master branch using pull-requests, just like ordinary changes. + +# ngtcp2 version + +## Build with OpenSSL + +Build (patched) OpenSSL + + % git clone --depth 1 -b OpenSSL_1_1_1g-quic-draft-29 https://github.com/tatsuhiro-t/openssl + % cd openssl + % ./config enable-tls1_3 --prefix= + % make + % make install_sw + +Build nghttp3 + + % cd .. + % git clone https://github.com/ngtcp2/nghttp3 + % cd nghttp3 + % autoreconf -i + % ./configure --prefix= --enable-lib-only + % make + % make install + +Build ngtcp2 + + % cd .. + % git clone https://github.com/ngtcp2/ngtcp2 + % cd ngtcp2 + % autoreconf -i + % ./configure PKG_CONFIG_PATH=/lib/pkgconfig:/lib/pkgconfig LDFLAGS="-Wl,-rpath,/lib" --prefix= + % make + % make install + +Build curl + + % cd .. + % git clone https://github.com/curl/curl + % cd curl + % ./buildconf + % LDFLAGS="-Wl,-rpath,/lib" ./configure --with-ssl= --with-nghttp3= --with-ngtcp2= --enable-alt-svc + % make + +## Build with GnuTLS + +Build (patched) GnuTLS + + % git clone --depth 1 -b tmp-quic https://gitlab.com/gnutls/gnutls.git + % cd gnutls + % ./bootstrap + % ./configure --disable-doc --prefix= + % make + % make install + +Build nghttp3 + + % cd .. + % git clone https://github.com/ngtcp2/nghttp3 + % cd nghttp3 + % autoreconf -i + % ./configure --prefix= --enable-lib-only + % make + % make install + +Build ngtcp2 + + % cd .. + % git clone https://github.com/ngtcp2/ngtcp2 + % cd ngtcp2 + % autoreconf -i + % ./configure PKG_CONFIG_PATH=/lib/pkgconfig:/lib/pkgconfig LDFLAGS="-Wl,-rpath,/lib" --prefix= + % make + % make install + +Build curl + + % cd .. 
+ % git clone https://github.com/curl/curl + % cd curl + % ./buildconf + % ./configure --without-ssl --with-gnutls= --with-nghttp3= --with-ngtcp2= --enable-alt-svc + % make + +# quiche version + +## build + +Build quiche and BoringSSL: + + % git clone --recursive https://github.com/cloudflare/quiche + % cd quiche + % cargo build --release --features pkg-config-meta,qlog + % mkdir deps/boringssl/src/lib + % ln -vnf $(find target/release -name libcrypto.a -o -name libssl.a) deps/boringssl/src/lib/ + +Build curl: + + % cd .. + % git clone https://github.com/curl/curl + % cd curl + % ./buildconf + % ./configure LDFLAGS="-Wl,-rpath,$PWD/../quiche/target/release" --with-ssl=$PWD/../quiche/deps/boringssl/src --with-quiche=$PWD/../quiche/target/release --enable-alt-svc + % make + +## Run + +Use HTTP/3 directly: + + curl --http3 https://nghttp2.org:8443/ + +Upgrade via Alt-Svc: + + curl --alt-svc altsvc.cache https://quic.aiortc.org/ + +See this [list of public HTTP/3 servers](https://bagder.github.io/HTTP3-test/) diff --git a/docs/INSTALL.cmake b/docs/INSTALL.cmake index 03328cbcd9346d..828d9b9c545115 100644 --- a/docs/INSTALL.cmake +++ b/docs/INSTALL.cmake @@ -24,9 +24,8 @@ Current flaws in the curl CMake build Missing features in the cmake build: - Builds libcurl without large file support - - Does not support all SSL libraries (only OpenSSL, WinSSL, DarwinSSL, and - mbed TLS) - - Doesn't build with SCP and SFTP support (libssh2) (see issue #1155) + - Does not support all SSL libraries (only OpenSSL, Schannel, + Secure Transport, and mbed TLS, NSS, WolfSSL) - Doesn't allow different resolver backends (no c-ares build support) - No RTMP support built - Doesn't allow build curl and libcurl debug enabled diff --git a/docs/INSTALL.md b/docs/INSTALL.md index f2f93227a8ec29..33b439ec56e991 100644 --- a/docs/INSTALL.md +++ b/docs/INSTALL.md @@ -7,6 +7,18 @@ document does not describe how to install curl or libcurl using such a binary package. This document describes how to compile, build and install curl and libcurl from source code. +## Building using vcpkg + +You can download and install curl and libcurl using the [vcpkg](https://github.com/Microsoft/vcpkg/) dependency manager: + + git clone https://github.com/Microsoft/vcpkg.git + cd vcpkg + ./bootstrap-vcpkg.sh + ./vcpkg integrate install + vcpkg install curl[tool] + +The curl port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please [create an issue or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository. + ## Building from git If you get your code off a git repository instead of a release tarball, see @@ -56,15 +68,15 @@ you have pkg-config installed, set the pkg-config path first, like this: Without pkg-config installed, use this: - ./configure --with-ssl=/opt/OpenSSL + ./configure --with-ssl=/opt/OpenSSL If you insist on forcing a build without SSL support, even though you may have OpenSSL installed in your system, you can run configure like this: - ./configure --without-ssl + ./configure --without-ssl If you have OpenSSL installed, but with the libraries in one place and the -header files somewhere else, you have to set the LDFLAGS and CPPFLAGS +header files somewhere else, you have to set the `LDFLAGS` and `CPPFLAGS` environment variables prior to running configure. Something like this should work: @@ -103,11 +115,11 @@ libressl. - GnuTLS: `--without-ssl --with-gnutls`. 
- wolfSSL: `--without-ssl --with-wolfssl` - NSS: `--without-ssl --with-nss` - - PolarSSL: `--without-ssl --with-polarssl` - mbedTLS: `--without-ssl --with-mbedtls` - schannel: `--without-ssl --with-schannel` - secure transport: `--without-ssl --with-secure-transport` - MesaLink: `--without-ssl --with-mesalink` + - BearSSL: `--without-ssl --with-bearssl` # Windows @@ -121,9 +133,9 @@ libressl. KB140584 is a must for any Windows developer. Especially important is full understanding if you are not going to follow the advice given above. - - [How To Use the C Run-Time](https://support.microsoft.com/kb/94248/en-us) - - [How to link with the correct C Run-Time CRT library](https://support.microsoft.com/kb/140584/en-us) - - [Potential Errors Passing CRT Objects Across DLL Boundaries](https://msdn.microsoft.com/en-us/library/ms235460) + - [How To Use the C Run-Time](https://support.microsoft.com/help/94248/how-to-use-the-c-run-time) + - [Run-Time Library Compiler Options](https://docs.microsoft.com/cpp/build/reference/md-mt-ld-use-run-time-library) + - [Potential Errors Passing CRT Objects Across DLL Boundaries](https://docs.microsoft.com/cpp/c-runtime-library/potential-errors-passing-crt-objects-across-dll-boundaries) If your app is misbehaving in some strange way, or it is suffering from memory corruption, before asking for further help, please try first to @@ -148,7 +160,7 @@ make targets available to build libcurl with more features, use: and SSPI support. If you have any problems linking libraries or finding header files, be sure -to verify that the provided "Makefile.m32" files use the proper paths, and +to verify that the provided `Makefile.m32` files use the proper paths, and adjust as necessary. It is also possible to override these paths with environment variables, for example: @@ -172,8 +184,8 @@ If you want to enable LDAPS support then set LDAPS=1. ## Cygwin Almost identical to the unix installation. Run the configure script in the -curl source tree root with `sh configure`. Make sure you have the sh -executable in /bin/ or you'll see the configure fail toward the end. +curl source tree root with `sh configure`. Make sure you have the `sh` +executable in `/bin/` or you'll see the configure fail toward the end. Run `make` @@ -183,26 +195,14 @@ The configure utility, unfortunately, is not available for the Windows environment, therefore, you cannot use the various disable-protocol options of the configure utility on this platform. -However, you can use the following defines to disable specific -protocols: - - - `HTTP_ONLY` disables all protocols except HTTP - - `CURL_DISABLE_FTP` disables FTP - - `CURL_DISABLE_LDAP` disables LDAP - - `CURL_DISABLE_TELNET` disables TELNET - - `CURL_DISABLE_DICT` disables DICT - - `CURL_DISABLE_FILE` disables FILE - - `CURL_DISABLE_TFTP` disables TFTP - - `CURL_DISABLE_HTTP` disables HTTP - - `CURL_DISABLE_IMAP` disables IMAP - - `CURL_DISABLE_POP3` disables POP3 - - `CURL_DISABLE_SMTP` disables SMTP +You can use specific defines to disable specific protocols and features. See +[CURL-DISABLE.md](CURL-DISABLE-md) for the full list. If you want to set any of these defines you have the following options: - - Modify lib/config-win32.h - - Modify lib/curl_setup.h - - Modify winbuild/Makefile.vc + - Modify `lib/config-win32.h` + - Modify `lib/curl_setup.h` + - Modify `winbuild/Makefile.vc` - Modify the "Preprocessor Definitions" in the libcurl project Note: The pre-processor settings can be found using the Visual Studio IDE @@ -213,12 +213,12 @@ versions. 
## Using BSD-style lwIP instead of Winsock TCP/IP stack in Win32 builds In order to compile libcurl and curl using BSD-style lwIP TCP/IP stack it is -necessary to make definition of preprocessor symbol USE_LWIPSOCK visible to +necessary to make definition of preprocessor symbol `USE_LWIPSOCK` visible to libcurl and curl compilation processes. To set this definition you have the following alternatives: - - Modify lib/config-win32.h and src/config-win32.h - - Modify winbuild/Makefile.vc + - Modify `lib/config-win32.h` and `src/config-win32.h` + - Modify `winbuild/Makefile.vc` - Modify the "Preprocessor Definitions" in the libcurl project Note: The pre-processor settings can be found using the Visual Studio IDE @@ -248,19 +248,19 @@ look for dynamic import symbols. ## Legacy Windows and SSL -WinSSL (specifically Schannel from Windows SSPI), is the native SSL library in -Windows. However, WinSSL in Windows <= XP is unable to connect to servers that +Schannel (from Windows SSPI), is the native SSL library in Windows. However, +Schannel in Windows <= XP is unable to connect to servers that no longer support the legacy handshakes and algorithms used by those versions. If you will be using curl in one of those earlier versions of Windows you should choose another SSL backend such as OpenSSL. -# Apple iOS and Mac OS X +# Apple iOS and macOS On modern Apple operating systems, curl can be built to use Apple's SSL/TLS implementation, Secure Transport, instead of OpenSSL. To build with Secure -Transport for SSL/TLS, use the configure option `--with-darwinssl`. (It is not -necessary to use the option `--without-ssl`.) This feature requires iOS 5.0 or -later, or OS X 10.5 ("Leopard") or later. +Transport for SSL/TLS, use the configure option `--with-secure-transport`. (It +is not necessary to use the option `--without-ssl`.) This feature requires iOS +5.0 or later, or OS X 10.5 ("Leopard") or later. When Secure Transport is in use, the curl options `--cacert` and `--capath` and their libcurl equivalents, will be ignored, because Secure Transport uses @@ -269,26 +269,67 @@ the server. This, of course, includes the root certificates that ship with the OS. The `--cert` and `--engine` options, and their libcurl equivalents, are currently unimplemented in curl with Secure Transport. -For OS X users: In OS X 10.8 ("Mountain Lion"), Apple made a major overhaul to -the Secure Transport API that, among other things, added support for the newer -TLS 1.1 and 1.2 protocols. To get curl to support TLS 1.1 and 1.2, you must -build curl on Mountain Lion or later, or by using the equivalent SDK. If you -set the `MACOSX_DEPLOYMENT_TARGET` environmental variable to an earlier -version of OS X prior to building curl, then curl will use the new Secure +For macOS users: In OS X 10.8 ("Mountain Lion"), Apple made a major overhaul +to the Secure Transport API that, among other things, added support for the +newer TLS 1.1 and 1.2 protocols. To get curl to support TLS 1.1 and 1.2, you +must build curl on Mountain Lion or later, or by using the equivalent SDK. If +you set the `MACOSX_DEPLOYMENT_TARGET` environmental variable to an earlier +version of macOS prior to building curl, then curl will use the new Secure Transport API on Mountain Lion and later, and fall back on the older API when the same curl binary is executed on older cats. 
For example, running these commands in curl's directory in the shell will build the code such that it will run on cats as old as OS X 10.6 ("Snow Leopard") (using bash): export MACOSX_DEPLOYMENT_TARGET="10.6" - ./configure --with-darwinssl + ./configure --with-secure-transport make +# Android + +When building curl for Android it's recommended to use a Linux environment +since using curl's `configure` script is the easiest way to build curl +for Android. Before you can build curl for Android, you need to install the +Android NDK first. This can be done using the SDK Manager that is part of +Android Studio. Once you have installed the Android NDK, you need to figure out +where it has been installed and then set up some environment variables before +launching `configure`. On macOS, those variables could look like this to compile +for `aarch64` and API level 29: + + export NDK=~/Library/Android/sdk/ndk/20.1.5948944 + export HOST_TAG=darwin-x86_64 + export TOOLCHAIN=$NDK/toolchains/llvm/prebuilt/$HOST_TAG + export AR=$TOOLCHAIN/bin/aarch64-linux-android-ar + export AS=$TOOLCHAIN/bin/aarch64-linux-android-as + export CC=$TOOLCHAIN/bin/aarch64-linux-android29-clang + export CXX=$TOOLCHAIN/bin/aarch64-linux-android29-clang++ + export LD=$TOOLCHAIN/bin/aarch64-linux-android-ld + export RANLIB=$TOOLCHAIN/bin/aarch64-linux-android-ranlib + export STRIP=$TOOLCHAIN/bin/aarch64-linux-android-strip + +When building on Linux or targeting other API levels or architectures, you need +to adjust those variables accordingly. After that you can build curl like this: + + ./configure --host aarch64-linux-android --with-pic --disable-shared + +Note that this won't give you SSL/TLS support. If you need SSL/TLS, you have +to build curl against a SSL/TLS layer, e.g. OpenSSL, because it's impossible for +curl to access Android's native SSL/TLS layer. To build curl for Android using +OpenSSL, follow the OpenSSL build instructions and then install `libssl.a` and +`libcrypto.a` to `$TOOLCHAIN/sysroot/usr/lib` and copy `include/openssl` to +`$TOOLCHAIN/sysroot/usr/include`. Now you can build curl for Android using +OpenSSL like this: + + ./configure --host aarch64-linux-android --with-pic --disable-shared --with-ssl="$TOOLCHAIN/sysroot/usr" + +Note, however, that you must target at least Android M (API level 23) or `configure` +won't be able to detect OpenSSL since `stderr` (and the like) weren't defined +before Android M. + # Cross compile Download and unpack the curl package. -'cd' to the new directory. (e.g. `cd curl-7.12.3`) +`cd` to the new directory. (e.g. `cd curl-7.12.3`) Set environment variables to point to the cross-compile toolchain and call configure with any options you need. Be sure and specify the `--host` and @@ -327,7 +368,7 @@ In some cases, you may be able to simplify the above commands to as little as: There are a number of configure options that can be used to reduce the size of libcurl for embedded applications where binary size is an important factor. -First, be sure to set the CFLAGS variable when configuring with any relevant +First, be sure to set the `CFLAGS` variable when configuring with any relevant compiler optimization flags to reduce the size of the binary. For gcc, this would mean at minimum the -Os option, and potentially the `-march=X`, `-mdynamic-no-pic` and `-flto` options as well, e.g. 
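Assuming a gcc toolchain, a size-focused build could be configured along these lines (the flag selection here is illustrative, not prescriptive):

    ./configure CFLAGS="-Os -flto" --disable-shared
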
@@ -360,8 +401,8 @@ use, here are some other flags that can reduce the size of the library: The GNU compiler and linker have a number of options that can reduce the size of the libcurl dynamic libraries on some platforms even further. -Specify them by providing appropriate CFLAGS and LDFLAGS variables on the -configure command-line, e.g. +Specify them by providing appropriate `CFLAGS` and `LDFLAGS` variables on +the configure command-line, e.g. CFLAGS="-Os -ffunction-sections -fdata-sections -fno-unwind-tables -fno-asynchronous-unwind-tables -flto" @@ -383,7 +424,7 @@ in a lower total size than dynamically linking. Note that the curl test harness can detect the use of some, but not all, of the `--disable` statements suggested above. Use will cause tests relying on those features to fail. The test harness can be manually forced to skip the -relevant tests by specifying certain key words on the runtests.pl command +relevant tests by specifying certain key words on the `runtests.pl` command line. Following is a list of appropriate key words: - `--disable-cookies` !cookies diff --git a/docs/INTERNALS.md b/docs/INTERNALS.md index 69b92d4e2f6c72..e9be0889a2621f 100644 --- a/docs/INTERNALS.md +++ b/docs/INTERNALS.md @@ -13,7 +13,7 @@ curl internals - [`Curl_disconnect`](#Curl_disconnect) - [HTTP(S)](#http) - [FTP](#ftp) - - [Kerberos](#kerberos) + - [Kerberos](#kerberos) - [TELNET](#telnet) - [FILE](#file) - [SMB](#smb) @@ -34,10 +34,17 @@ curl internals - [`curl_off_t`](#curl_off_t) - [curlx](#curlx) - [Content Encoding](#contentencoding) - - [hostip.c explained](#hostip) + - [`hostip.c` explained](#hostip) - [Track Down Memory Leaks](#memoryleak) - [`multi_socket`](#multi_socket) - [Structs in libcurl](#structs) + - [Curl_easy](#Curl_easy) + - [connectdata](#connectdata) + - [Curl_multi](#Curl_multi) + - [Curl_handler](#Curl_handler) + - [conncache](#conncache) + - [Curl_share](#Curl_share) + - [CookieInfo](#CookieInfo) Intro @@ -66,7 +73,7 @@ git Portability =========== - We write curl and libcurl to compile with C89 compilers. On 32bit and up + We write curl and libcurl to compile with C89 compilers. On 32-bit and up machines. Most of libcurl assumes more or less POSIX compliance but that's not a requirement. @@ -78,19 +85,19 @@ Dependencies ------------ - OpenSSL 0.9.7 - - GnuTLS 2.11.3 + - GnuTLS 3.1.10 - zlib 1.1.4 - libssh2 0.16 - c-ares 1.6.0 - libidn2 2.0.0 - - cyassl 2.0.0 + - wolfSSL 2.0.0 - openldap 2.0 - MIT Kerberos 1.2.4 - GSKit V5R3M0 - NSS 3.14.x - - PolarSSL 1.3.0 - Heimdal ? - - nghttp2 1.0.0 + - nghttp2 1.12.0 + - WinSock 2.2 (on Windows 95+ and Windows CE .NET 4.1+) Operating Systems ----------------- @@ -118,7 +125,7 @@ Build tools - GNU M4 1.4 - perl 5.004 - roffit 0.5 - - groff ? (any version that supports "groff -Tps -man [in] [out]") + - groff ? (any version that supports `groff -Tps -man [in] [out]`) - ps2pdf (gs) ? @@ -132,7 +139,7 @@ Windows vs Unix In curl, this is solved with defines and macros, so that the source looks the same in all places except for the header file that defines them. The - macros in use are sclose(), sread() and swrite(). + macros in use are `sclose()`, `sread()` and `swrite()`. 2. Windows requires a couple of init calls for the socket stuff. @@ -140,6 +147,8 @@ Windows vs Unix also do it etc there might be reasons for applications to alter that behaviour. + We require WinSock version 2.2 and load this version during global init. + 3. 
The file descriptors for network communication and file operations are not as easily interchangeable as in Unix. @@ -171,14 +180,14 @@ Library There are plenty of entry points to the library, namely each publicly defined function that libcurl offers to applications. All of those functions are rather small and easy-to-follow. All the ones prefixed with `curl_easy` are - put in the lib/easy.c file. + put in the `lib/easy.c` file. `curl_global_init()` and `curl_global_cleanup()` should be called by the application to initialize and clean up global stuff in the library. As of today, it can handle the global SSL initing if SSL is enabled and it can init the socket layer on windows machines. libcurl itself has no "global" scope. - All printf()-style functions use the supplied clones in lib/mprintf.c. This + All printf()-style functions use the supplied clones in `lib/mprintf.c`. This makes sure we stay absolutely platform independent. [ `curl_easy_init()`][2] allocates an internal struct and makes some @@ -197,8 +206,8 @@ Library `curl_multi_wait()`, and `curl_multi_perform()` until the transfer is done and then returns. - Some of the most important key functions in url.c are called from multi.c - when certain key steps are to be made in the transfer operation. + Some of the most important key functions in `url.c` are called from + `multi.c` when certain key steps are to be made in the transfer operation. Curl_connect() @@ -206,15 +215,15 @@ Curl_connect() Analyzes the URL, it separates the different components and connects to the remote host. This may involve using a proxy and/or using SSL. The - `Curl_resolv()` function in lib/hostip.c is used for looking up host names - (it does then use the proper underlying method, which may vary between - platforms and builds). + `Curl_resolv()` function in `lib/hostip.c` is used for looking up host + names (it does then use the proper underlying method, which may vary + between platforms and builds). When `Curl_connect` is done, we are connected to the remote site. Then it is time to tell the server to get a document/file. `Curl_do()` arranges this. - This function makes sure there's an allocated and initiated 'connectdata' + This function makes sure there's an allocated and initiated `connectdata` struct that is used for this particular connection only (although there may be several requests performed on the same connect). A bunch of things are inited/inherited from the `Curl_easy` struct. @@ -223,15 +232,15 @@ Curl_connect() multi_do() --------- - `multi_do()` makes sure the proper protocol-specific function is called. The - functions are named after the protocols they handle. + `multi_do()` makes sure the proper protocol-specific function is called. + The functions are named after the protocols they handle. The protocol-specific functions of course deal with protocol-specific negotiations and setup. They have access to the `Curl_sendf()` (from - lib/sendf.c) function to send printf-style formatted data to the remote + `lib/sendf.c`) function to send printf-style formatted data to the remote host and when they're ready to make the actual file transfer they call the - `Curl_setup_transfer()` function (in lib/transfer.c) to setup the transfer - and returns. + `Curl_setup_transfer()` function (in `lib/transfer.c`) to setup the + transfer and returns. 
If this DO function fails and the connection is being re-used, libcurl will then close this connection, setup a new connection and re-issue the DO @@ -245,9 +254,9 @@ Curl_readwrite() Called during the transfer of the actual protocol payload. - During transfer, the progress functions in lib/progress.c are called at + During transfer, the progress functions in `lib/progress.c` are called at frequent intervals (or at the user's choice, a specified callback might get - called). The speedcheck functions in lib/speedcheck.c are also used to + called). The speedcheck functions in `lib/speedcheck.c` are also used to verify that the transfer is as fast as required. @@ -279,11 +288,12 @@ HTTP(S) ======= HTTP offers a lot and is the protocol in curl that uses the most lines of - code. There is a special file (lib/formdata.c) that offers all the multipart - post functions. + code. There is a special file `lib/formdata.c` that offers all the + multipart post functions. - base64-functions for user+password stuff (and more) is in (lib/base64.c) and - all functions for parsing and sending cookies are found in (lib/cookie.c). + base64-functions for user+password stuff (and more) is in `lib/base64.c` + and all functions for parsing and sending cookies are found in + `lib/cookie.c`. HTTPS uses in almost every case the same procedure as HTTP, with only two exceptions: the connect procedure is different and the function used to read @@ -305,74 +315,76 @@ FTP === The `Curl_if2ip()` function can be used for getting the IP number of a - specified network interface, and it resides in lib/if2ip.c. + specified network interface, and it resides in `lib/if2ip.c`. `Curl_ftpsendf()` is used for sending FTP commands to the remote server. It was made a separate function to prevent us programmers from forgetting that - they must be CRLF terminated. They must also be sent in one single write() to - make firewalls and similar happy. + they must be CRLF terminated. They must also be sent in one single `write()` + to make firewalls and similar happy. Kerberos ======== - Kerberos support is mainly in lib/krb5.c and lib/security.c but also - `curl_sasl_sspi.c` and `curl_sasl_gssapi.c` for the email protocols and - `socks_gssapi.c` and `socks_sspi.c` for SOCKS5 proxy specifics. + Kerberos support is mainly in `lib/krb5.c` but also `curl_sasl_sspi.c` and + `curl_sasl_gssapi.c` for the email protocols and `socks_gssapi.c` and + `socks_sspi.c` for SOCKS5 proxy specifics. TELNET ====== - Telnet is implemented in lib/telnet.c. + Telnet is implemented in `lib/telnet.c`. FILE ==== - The file:// protocol is dealt with in lib/file.c. + The `file://` protocol is dealt with in `lib/file.c`. SMB === - The smb:// protocol is dealt with in lib/smb.c. + The `smb://` protocol is dealt with in `lib/smb.c`. LDAP ==== - Everything LDAP is in lib/ldap.c and lib/openldap.c + Everything LDAP is in `lib/ldap.c` and `lib/openldap.c`. E-mail ====== - The e-mail related source code is in lib/imap.c, lib/pop3.c and lib/smtp.c. + The e-mail related source code is in `lib/imap.c`, `lib/pop3.c` and + `lib/smtp.c`. General ======= URL encoding and decoding, called escaping and unescaping in the source code, - is found in lib/escape.c. + is found in `lib/escape.c`. - While transferring data in Transfer() a few functions might get used. - `curl_getdate()` in lib/parsedate.c is for HTTP date comparisons (and more). + While transferring data in `Transfer()` a few functions might get used. 
+ `curl_getdate()` in `lib/parsedate.c` is for HTTP date comparisons (and + more). - lib/getenv.c offers `curl_getenv()` which is for reading environment + `lib/getenv.c` offers `curl_getenv()` which is for reading environment variables in a neat platform independent way. That's used in the client, but - also in lib/url.c when checking the proxy environment variables. Note that - contrary to the normal unix getenv(), this returns an allocated buffer that - must be free()ed after use. + also in `lib/url.c` when checking the proxy environment variables. Note that + contrary to the normal unix `getenv()`, this returns an allocated buffer that + must be `free()`ed after use. - lib/netrc.c holds the .netrc parser + `lib/netrc.c` holds the `.netrc` parser. - lib/timeval.c features replacement functions for systems that don't have - gettimeofday() and a few support functions for timeval conversions. + `lib/timeval.c` features replacement functions for systems that don't have + `gettimeofday()` and a few support functions for timeval conversions. A function named `curl_version()` that returns the full curl version string - is found in lib/version.c. + is found in `lib/version.c`. Persistent Connections @@ -386,7 +398,7 @@ Persistent Connections as well as all the options etc that the library-user may choose. - The `Curl_easy` struct holds the "connection cache" (an array of - pointers to 'connectdata' structs). + pointers to `connectdata` structs). - This enables the 'curl handle' to be reused on subsequent transfers. @@ -434,10 +446,10 @@ SSL libraries in future libcurl versions. To deal with this internally in the best way possible, we have a generic SSL - function API as provided by the vtls/vtls.[ch] system, and they are the only + function API as provided by the `vtls/vtls.[ch]` system, and they are the only SSL functions we must use from within libcurl. vtls is then crafted to use the appropriate lower-level function calls to whatever SSL library that is in - use. For example vtls/openssl.[ch] for the OpenSSL library. + use. For example `vtls/openssl.[ch]` for the OpenSSL library. Library Symbols @@ -456,7 +468,7 @@ Return Codes and Informationals I've made things simple. Almost every function in libcurl returns a CURLcode, that must be `CURLE_OK` if everything is OK or otherwise a suitable error - code as the curl/curl.h include file defines. The very spot that detects an + code as the `curl/curl.h` include file defines. The very spot that detects an error must use the `Curl_failf()` function to set the human-readable error description. @@ -478,20 +490,20 @@ API/ABI Client ====== - main() resides in `src/tool_main.c`. + `main()` resides in `src/tool_main.c`. - `src/tool_hugehelp.c` is automatically generated by the mkhelp.pl perl script - to display the complete "manual" and the `src/tool_urlglob.c` file holds the - functions used for the URL-"globbing" support. Globbing in the sense that the - {} and [] expansion stuff is there. + `src/tool_hugehelp.c` is automatically generated by the `mkhelp.pl` perl + script to display the complete "manual" and the `src/tool_urlglob.c` file + holds the functions used for the URL-"globbing" support. Globbing in the + sense that the `{}` and `[]` expansion stuff is there. 
- The client mostly sets up its 'config' struct properly, then + The client mostly sets up its `config` struct properly, then it calls the `curl_easy_*()` functions of the library and when it gets back control after the `curl_easy_perform()` it cleans up the library, checks status and exits. - When the operation is done, the ourWriteOut() function in src/writeout.c may - be called to report about the operation. That function is using the + When the operation is done, the `ourWriteOut()` function in `src/writeout.c` + may be called to report about the operation. That function is mostly using the `curl_easy_getinfo()` function to extract useful information from the curl session. @@ -502,30 +514,32 @@ Client Memory Debugging ================ - The file lib/memdebug.c contains debug-versions of a few functions. Functions - such as malloc, free, fopen, fclose, etc that somehow deal with resources - that might give us problems if we "leak" them. The functions in the memdebug - system do nothing fancy, they do their normal function and then log - information about what they just did. The logged data can then be analyzed - after a complete session, + The file `lib/memdebug.c` contains debug-versions of a few functions. + Functions such as `malloc()`, `free()`, `fopen()`, `fclose()`, etc that + somehow deal with resources that might give us problems if we "leak" them. + The functions in the memdebug system do nothing fancy, they do their normal + function and then log information about what they just did. The logged data + can then be analyzed after a complete session, - memanalyze.pl is the perl script present in tests/ that analyzes a log file - generated by the memory tracking system. It detects if resources are + `memanalyze.pl` is the perl script present in `tests/` that analyzes a log + file generated by the memory tracking system. It detects if resources are allocated but never freed and other kinds of errors related to resource management. - Internally, definition of preprocessor symbol DEBUGBUILD restricts code which - is only compiled for debug enabled builds. And symbol CURLDEBUG is used to - differentiate code which is _only_ used for memory tracking/debugging. + Internally, definition of preprocessor symbol `DEBUGBUILD` restricts code + which is only compiled for debug enabled builds. And symbol `CURLDEBUG` is + used to differentiate code which is _only_ used for memory + tracking/debugging. - Use -DCURLDEBUG when compiling to enable memory debugging, this is also - switched on by running configure with --enable-curldebug. Use -DDEBUGBUILD - when compiling to enable a debug build or run configure with --enable-debug. + Use `-DCURLDEBUG` when compiling to enable memory debugging, this is also + switched on by running configure with `--enable-curldebug`. Use + `-DDEBUGBUILD` when compiling to enable a debug build or run configure with + `--enable-debug`. - curl --version will list 'Debug' feature for debug enabled builds, and + `curl --version` will list 'Debug' feature for debug enabled builds, and will list 'TrackMemory' feature for curl debug memory tracking capable builds. These features are independent and can be controlled when running - the configure script. When --enable-debug is given both features will be + the configure script. When `--enable-debug` is given both features will be enabled, unless some restriction prevents memory tracking from being used. @@ -536,12 +550,12 @@ Test Suite curl archive tree, and it contains a bunch of scripts and a lot of test case data. 
- The main test script is runtests.pl that will invoke test servers like - httpserver.pl and ftpserver.pl before all the test cases are performed. The - test suite currently only runs on Unix-like platforms. + The main test script is `runtests.pl` that will invoke test servers like + `httpserver.pl` and `ftpserver.pl` before all the test cases are performed. + The test suite currently only runs on Unix-like platforms. - You'll find a description of the test suite in the tests/README file, and the - test case data files in the tests/FILEFORMAT file. + You'll find a description of the test suite in the `tests/README` file, and + the test case data files in the `tests/FILEFORMAT` file. The test suite automatically detects if curl was built with the memory debugging enabled, and if it was, it will detect memory leaks, too. @@ -569,7 +583,7 @@ Asynchronous name resolves prevent linking errors later on). Then I simply build the areslib project (the other projects adig/ahost seem to fail under MSVC). - Next was libcurl. I opened lib/config-win32.h and I added a: + Next was libcurl. I opened `lib/config-win32.h` and I added a: `#define USE_ARES 1` Next thing I did was I added the path for the ares includes to the include @@ -578,8 +592,8 @@ Asynchronous name resolves Lastly, I also changed libcurl to be single-threaded rather than multi-threaded, again this was to prevent some duplicate symbol errors. I'm not sure why I needed to change everything to single-threaded, but when I - didn't I got redefinition errors for several CRT functions (malloc, stricmp, - etc.) + didn't I got redefinition errors for several CRT functions (`malloc()`, + `stricmp()`, etc.) `curl_off_t` @@ -587,7 +601,7 @@ Asynchronous name resolves `curl_off_t` is a data type provided by the external libcurl include headers. It is the type meant to be used for the [`curl_easy_setopt()`][1] - options that end with LARGE. The type is 64bit large on most modern + options that end with LARGE. The type is 64-bit large on most modern platforms. @@ -600,15 +614,15 @@ curlx additional functions. We provide them through a single header file for easy access for apps: - "curlx.h" + `curlx.h` `curlx_strtoofft()` ------------------- A macro that converts a string containing a number to a `curl_off_t` number. This might use the `curlx_strtoll()` function which is provided as source code in strtoofft.c. Note that the function is only provided if no - strtoll() (or equivalent) function exist on your platform. If `curl_off_t` - is only a 32 bit number on your platform, this macro uses strtol(). + `strtoll()` (or equivalent) function exist on your platform. If `curl_off_t` + is only a 32-bit number on your platform, this macro uses `strtol()`. Future ------ @@ -642,27 +656,28 @@ Content Encoding [HTTP/1.1][4] specifies that a client may request that a server encode its response. This is usually used to compress a response using one (or more) encodings from a set of commonly available compression techniques. These - schemes include 'deflate' (the zlib algorithm), 'gzip' 'br' (brotli) and - 'compress'. A client requests that the server perform an encoding by including - an Accept-Encoding header in the request document. The value of the header - should be one of the recognized tokens 'deflate', ... (there's a way to + schemes include `deflate` (the zlib algorithm), `gzip`, `br` (brotli) and + `compress`. A client requests that the server perform an encoding by including + an `Accept-Encoding` header in the request document. 
The value of the header + should be one of the recognized tokens `deflate`, ... (there's a way to register new schemes/tokens, see sec 3.5 of the spec). A server MAY honor the client's encoding request. When a response is encoded, the server - includes a Content-Encoding header in the response. The value of the - Content-Encoding header indicates which encodings were used to encode the + includes a `Content-Encoding` header in the response. The value of the + `Content-Encoding` header indicates which encodings were used to encode the data, in the order in which they were applied. It's also possible for a client to attach priorities to different schemes so that the server knows which it prefers. See sec 14.3 of RFC 2616 for more - information on the Accept-Encoding header. See sec [3.1.2.2 of RFC 7231][15] - for more information on the Content-Encoding header. + information on the `Accept-Encoding` header. See sec + [3.1.2.2 of RFC 7231][15] for more information on the `Content-Encoding` + header. ## Supported content encodings - The 'deflate', 'gzip' and 'br' content encodings are supported by libcurl. + The `deflate`, `gzip` and `br` content encodings are supported by libcurl. Both regular and chunked transfers work fine. The zlib library is required - for the 'deflate' and 'gzip' encodings, while the brotli decoding library is - for the 'br' encoding. + for the `deflate` and `gzip` encodings, while the brotli decoding library is + for the `br` encoding. ## The libcurl interface @@ -670,45 +685,45 @@ Content Encoding [`curl_easy_setopt`][1](curl, [`CURLOPT_ACCEPT_ENCODING`][5], string) - where string is the intended value of the Accept-Encoding header. + where string is the intended value of the `Accept-Encoding` header. Currently, libcurl does support multiple encodings but only - understands how to process responses that use the "deflate", "gzip" and/or - "br" content encodings, so the only values for [`CURLOPT_ACCEPT_ENCODING`][5] - that will work (besides "identity," which does nothing) are "deflate", - "gzip" and "br". If a response is encoded using the "compress" or methods, + understands how to process responses that use the `deflate`, `gzip` and/or + `br` content encodings, so the only values for [`CURLOPT_ACCEPT_ENCODING`][5] + that will work (besides `identity`, which does nothing) are `deflate`, + `gzip` and `br`. If a response is encoded using the `compress` or other unsupported methods, libcurl will return an error indicating that the response could - not be decoded. If `<string>` is NULL no Accept-Encoding header is generated. - If `<string>` is a zero-length string, then an Accept-Encoding header - containing all supported encodings will be generated. + not be decoded. If `<string>` is NULL no `Accept-Encoding` header is + generated. If `<string>` is a zero-length string, then an `Accept-Encoding` + header containing all supported encodings will be generated. The [`CURLOPT_ACCEPT_ENCODING`][5] must be set to any non-NULL value for content to be automatically decoded. If it is not set and the server still sends encoded content (despite not having been asked), the data is returned - in its raw form and the Content-Encoding type is not checked. + in its raw form and the `Content-Encoding` type is not checked. ## The curl interface - Use the [--compressed][6] option with curl to cause it to ask servers to + Use the [`--compressed`][6] option with curl to cause it to ask servers to compress responses using any format supported by curl.
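+
+  As a quick illustration (a minimal sketch only: error handling is left out
+  and the URL is just a placeholder), an application that wants transparently
+  decoded responses could do:
+
+      #include <curl/curl.h>
+
+      int main(void)
+      {
+        CURL *curl = curl_easy_init();
+        if(curl) {
+          curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
+          /* a zero-length string makes libcurl offer every encoding it was
+             built to support and decode the response body automatically */
+          curl_easy_setopt(curl, CURLOPT_ACCEPT_ENCODING, "");
+          curl_easy_perform(curl);
+          curl_easy_cleanup(curl);
+        }
+        return 0;
+      }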
-hostip.c explained -================== +`hostip.c` explained +==================== - The main compile-time defines to keep in mind when reading the host*.c source - file are these: + The main compile-time defines to keep in mind when reading the `host*.c` + source file are these: ## `CURLRES_IPV6` - this host has getaddrinfo() and family, and thus we use that. The host may + this host has `getaddrinfo()` and family, and thus we use that. The host may not be able to resolve IPv6, but we don't really have to take that into account. Hosts that aren't IPv6-enabled have `CURLRES_IPV4` defined. ## `CURLRES_ARES` is defined if libcurl is built to use c-ares for asynchronous name - resolves. This can be Windows or *nix. + resolves. This can be Windows or \*nix. ## `CURLRES_THREADED` @@ -721,20 +736,20 @@ hostip.c explained libcurl is not built to use an asynchronous resolver, `CURLRES_SYNCH` is defined. -## host*.c sources +## `host*.c` sources - The host*.c sources files are split up like this: + The `host*.c` sources files are split up like this: - - hostip.c - method-independent resolver functions and utility functions - - hostasyn.c - functions for asynchronous name resolves - - hostsyn.c - functions for synchronous name resolves - - asyn-ares.c - functions for asynchronous name resolves using c-ares - - asyn-thread.c - functions for asynchronous name resolves using threads - - hostip4.c - IPv4 specific functions - - hostip6.c - IPv6 specific functions + - `hostip.c` - method-independent resolver functions and utility functions + - `hostasyn.c` - functions for asynchronous name resolves + - `hostsyn.c` - functions for synchronous name resolves + - `asyn-ares.c` - functions for asynchronous name resolves using c-ares + - `asyn-thread.c` - functions for asynchronous name resolves using threads + - `hostip4.c` - IPv4 specific functions + - `hostip6.c` - IPv6 specific functions - The hostip.h is the single united header file for all this. It defines the - `CURLRES_*` defines based on the config*.h and `curl_setup.h` defines. + The `hostip.h` is the single united header file for all this. It defines the + `CURLRES_*` defines based on the `config*.h` and `curl_setup.h` defines. Track Down Memory Leaks @@ -746,14 +761,13 @@ Track Down Memory Leaks than one thread. If you want/need to use it in a multi-threaded app. Please adjust accordingly. - ## Build - Rebuild libcurl with -DCURLDEBUG (usually, rerunning configure with - --enable-debug fixes this). 'make clean' first, then 'make' so that all + Rebuild libcurl with `-DCURLDEBUG` (usually, rerunning configure with + `--enable-debug` fixes this). `make clean` first, then `make` so that all files are actually rebuilt properly. It will also make sense to build - libcurl with the debug option (usually -g to the compiler) so that debugging - it will be easier if you actually do find a leak in the library. + libcurl with the debug option (usually `-g` to the compiler) so that + debugging it will be easier if you actually do find a leak in the library. This will create a library that has memory debugging enabled. @@ -761,7 +775,7 @@ Track Down Memory Leaks Add a line in your application code: - `curl_memdebug("dump");` + `curl_dbg_memdebug("dump");` This will make the malloc debug system output a full trace of all resource using functions to the given file name. 
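+
+  As a rough sketch of that change (note that `lib/memdebug.h` is an internal
+  header that is not installed, so the prototype is declared by hand below and
+  its exact signature is an assumption), the call can go early in `main()` so
+  that every allocation ends up in the trace:
+
+      #include <curl/curl.h>
+
+      /* internal debug function - only present in a libcurl built with
+         -DCURLDEBUG; declared manually here (assumed prototype) */
+      extern void curl_dbg_memdebug(const char *logname);
+
+      int main(void)
+      {
+        curl_dbg_memdebug("dump"); /* write the resource trace to "dump" */
+        curl_global_init(CURL_GLOBAL_ALL);
+        /* ... the application's normal libcurl work goes here ... */
+        curl_global_cleanup();
+        return 0;
+      }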
Make sure you rebuild your program @@ -777,7 +791,7 @@ Track Down Memory Leaks ## Analyze the Flow - Use the tests/memanalyze.pl perl script to analyze the dump file: + Use the `tests/memanalyze.pl` perl script to analyze the dump file: tests/memanalyze.pl dump @@ -793,45 +807,46 @@ Track Down Memory Leaks Implementation of the `curl_multi_socket` API - The main ideas of this API are simply: - - 1 - The application can use whatever event system it likes as it gets info - from libcurl about what file descriptors libcurl waits for what action - on. (The previous API returns `fd_sets` which is very select()-centric). - - 2 - When the application discovers action on a single socket, it calls - libcurl and informs that there was action on this particular socket and - libcurl can then act on that socket/transfer only and not care about - any other transfers. (The previous API always had to scan through all - the existing transfers.) - - The idea is that [`curl_multi_socket_action()`][7] calls a given callback - with information about what socket to wait for what action on, and the - callback only gets called if the status of that socket has changed. - - We also added a timer callback that makes libcurl call the application when - the timeout value changes, and you set that with [`curl_multi_setopt()`][9] - and the [`CURLMOPT_TIMERFUNCTION`][10] option. To get this to work, - Internally, there's an added struct to each easy handle in which we store - an "expire time" (if any). The structs are then "splay sorted" so that we - can add and remove times from the linked list and yet somewhat swiftly - figure out both how long there is until the next nearest timer expires - and which timer (handle) we should take care of now. Of course, the upside - of all this is that we get a [`curl_multi_timeout()`][8] that should also - work with old-style applications that use [`curl_multi_perform()`][11]. - - We created an internal "socket to easy handles" hash table that given - a socket (file descriptor) returns the easy handle that waits for action on - that socket. This hash is made using the already existing hash code - (previously only used for the DNS cache). - - To make libcurl able to report plain sockets in the socket callback, we had - to re-organize the internals of the [`curl_multi_fdset()`][12] etc so that - the conversion from sockets to `fd_sets` for that function is only done in - the last step before the data is returned. I also had to extend c-ares to - get a function that can return plain sockets, as that library too returned - only `fd_sets` and that is no longer good enough. The changes done to c-ares - are available in c-ares 1.3.1 and later. + The main ideas of this API are simply: + + 1. The application can use whatever event system it likes as it gets info + from libcurl about what file descriptors libcurl waits for what action + on. (The previous API returns `fd_sets` which is very + `select()`-centric). + + 2. When the application discovers action on a single socket, it calls + libcurl and informs that there was action on this particular socket and + libcurl can then act on that socket/transfer only and not care about + any other transfers. (The previous API always had to scan through all + the existing transfers.) + + The idea is that [`curl_multi_socket_action()`][7] calls a given callback + with information about what socket to wait for what action on, and the + callback only gets called if the status of that socket has changed. 
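+
+  A bare-bones sketch of how an application hooks in such a callback (the
+  event-loop integration is application specific and left out here, so the
+  callback below only logs what libcurl asks for; `sock_cb` and
+  `setup_socket_callback` are made-up names for illustration):
+
+      #include <stdio.h>
+      #include <curl/curl.h>
+
+      /* called by libcurl whenever the application should start or stop
+         waiting for activity on a socket */
+      static int sock_cb(CURL *easy, curl_socket_t s, int what,
+                         void *userp, void *socketp)
+      {
+        /* 'what' is CURL_POLL_IN, CURL_POLL_OUT, CURL_POLL_INOUT or
+           CURL_POLL_REMOVE - a real application would (de)register the
+           socket with its event loop here */
+        (void)easy; (void)userp; (void)socketp;
+        fprintf(stderr, "socket %ld: what=%d\n", (long)s, what);
+        return 0;
+      }
+
+      static void setup_socket_callback(CURLM *multi)
+      {
+        curl_multi_setopt(multi, CURLMOPT_SOCKETFUNCTION, sock_cb);
+      }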
+ + We also added a timer callback that makes libcurl call the application when + the timeout value changes, and you set that with [`curl_multi_setopt()`][9] + and the [`CURLMOPT_TIMERFUNCTION`][10] option. To get this to work, + Internally, there's an added struct to each easy handle in which we store + an "expire time" (if any). The structs are then "splay sorted" so that we + can add and remove times from the linked list and yet somewhat swiftly + figure out both how long there is until the next nearest timer expires + and which timer (handle) we should take care of now. Of course, the upside + of all this is that we get a [`curl_multi_timeout()`][8] that should also + work with old-style applications that use [`curl_multi_perform()`][11]. + + We created an internal "socket to easy handles" hash table that given + a socket (file descriptor) returns the easy handle that waits for action on + that socket. This hash is made using the already existing hash code + (previously only used for the DNS cache). + + To make libcurl able to report plain sockets in the socket callback, we had + to re-organize the internals of the [`curl_multi_fdset()`][12] etc so that + the conversion from sockets to `fd_sets` for that function is only done in + the last step before the data is returned. I also had to extend c-ares to + get a function that can return plain sockets, as that library too returned + only `fd_sets` and that is no longer good enough. The changes done to c-ares + are available in c-ares 1.3.1 and later. Structs in libcurl @@ -840,40 +855,42 @@ Structs in libcurl This section should cover 7.32.0 pretty accurately, but will make sense even for older and later versions as things don't change drastically that often. + ## Curl_easy The `Curl_easy` struct is the one returned to the outside in the external API - as a "CURL *". This is usually known as an easy handle in API documentations + as a `CURL *`. This is usually known as an easy handle in API documentations and examples. Information and state that is related to the actual connection is in the - 'connectdata' struct. When a transfer is about to be made, libcurl will + `connectdata` struct. When a transfer is about to be made, libcurl will either create a new connection or re-use an existing one. The particular connectdata that is used by this handle is pointed out by `Curl_easy->easy_conn`. Data and information that regard this particular single transfer is put in - the SingleRequest sub-struct. + the `SingleRequest` sub-struct. When the `Curl_easy` struct is added to a multi handle, as it must be in - order to do any transfer, the ->multi member will point to the `Curl_multi` - struct it belongs to. The ->prev and ->next members will then be used by the - multi code to keep a linked list of `Curl_easy` structs that are added to - that same multi handle. libcurl always uses multi so ->multi *will* point to - a `Curl_multi` when a transfer is in progress. + order to do any transfer, the `->multi` member will point to the `Curl_multi` + struct it belongs to. The `->prev` and `->next` members will then be used by + the multi code to keep a linked list of `Curl_easy` structs that are added to + that same multi handle. libcurl always uses multi so `->multi` *will* point + to a `Curl_multi` when a transfer is in progress. - ->mstate is the multi state of this particular `Curl_easy`. When + `->mstate` is the multi state of this particular `Curl_easy`. When `multi_runsingle()` is called, it will act on this handle according to which state it is in. 
The mstate is also what tells which sockets to return for a specific `Curl_easy` when [`curl_multi_fdset()`][12] is called etc. - The libcurl source code generally use the name 'data' for the variable that + The libcurl source code generally use the name `data` for the variable that points to the `Curl_easy`. When doing multiplexed HTTP/2 transfers, each `Curl_easy` is associated with an individual stream, sharing the same connectdata struct. Multiplexing makes it even more important to keep things associated with the right thing! + ## connectdata A general idea in libcurl is to keep connections around in a connection @@ -881,16 +898,16 @@ for older and later versions as things don't change drastically that often. re-use an existing one instead of creating a new as it creates a significant performance boost. - Each 'connectdata' identifies a single physical connection to a server. If + Each `connectdata` identifies a single physical connection to a server. If the connection can't be kept alive, the connection will be closed after use and then this struct can be removed from the cache and freed. Thus, the same `Curl_easy` can be used multiple times and each time select - another connectdata struct to use for the connection. Keep this in mind, as - it is then important to consider if options or choices are based on the + another `connectdata` struct to use for the connection. Keep this in mind, + as it is then important to consider if options or choices are based on the connection or the `Curl_easy`. - Functions in libcurl will assume that connectdata->data points to the + Functions in libcurl will assume that `connectdata->data` points to the `Curl_easy` that uses this connection (for the moment). As a special complexity, some protocols supported by libcurl require a @@ -905,15 +922,16 @@ for older and later versions as things don't change drastically that often. this single struct and thus can be considered a single connection for most internal concerns. - The libcurl source code generally use the name 'conn' for the variable that + The libcurl source code generally use the name `conn` for the variable that points to the connectdata. + ## Curl_multi Internally, the easy interface is implemented as a wrapper around multi interface functions. This makes everything multi interface. - `Curl_multi` is the multi handle struct exposed as "CURLM *" in external + `Curl_multi` is the multi handle struct exposed as `CURLM *` in external APIs. This struct holds a list of `Curl_easy` structs that have been added to this @@ -940,18 +958,19 @@ for older and later versions as things don't change drastically that often. `->conn_cache` points to the connection cache. It keeps track of all connections that are kept after use. The cache has a maximum size. - `->closure_handle` is described in the 'connectdata' section. + `->closure_handle` is described in the `connectdata` section. - The libcurl source code generally use the name 'multi' for the variable that + The libcurl source code generally use the name `multi` for the variable that points to the `Curl_multi` struct. + ## Curl_handler Each unique protocol that is supported by libcurl needs to provide at least one `Curl_handler` struct. It defines what the protocol is called and what functions the main code should call to deal with protocol specific issues. - In general, there's a source file named [protocol].c in which there's a - "struct `Curl_handler` `Curl_handler_[protocol]`" declared. 
In url.c there's + In general, there's a source file named `[protocol].c` in which there's a + `struct Curl_handler Curl_handler_[protocol]` declared. In `url.c` there's then the main array with all individual `Curl_handler` structs pointed to from a single array which is scanned through when a URL is given to libcurl to work with. @@ -963,9 +982,9 @@ for older and later versions as things don't change drastically that often. `->setup_connection` is called to allow the protocol code to allocate protocol specific data that then gets associated with that `Curl_easy` for the rest of this transfer. It gets freed again at the end of the transfer. - It will be called before the 'connectdata' for the transfer has been + It will be called before the `connectdata` for the transfer has been selected/created. Most protocols will allocate its private - 'struct [PROTOCOL]' here and assign `Curl_easy->req.protop` to point to it. + `struct [PROTOCOL]` here and assign `Curl_easy->req.protop` to point to it. `->connect_it` allows a protocol to do some specific actions after the TCP connect is done, that can still be considered part of the connection phase. @@ -992,25 +1011,25 @@ for older and later versions as things don't change drastically that often. `->do_more` gets called during the `DO_MORE` state. The FTP protocol uses this state when setting up the second connection. - ->`proto_getsock` - ->`doing_getsock` - ->`domore_getsock` - ->`perform_getsock` + `->proto_getsock` + `->doing_getsock` + `->domore_getsock` + `->perform_getsock` Functions that return socket information. Which socket(s) to wait for which action(s) during the particular multi state. - ->disconnect is called immediately before the TCP connection is shutdown. + `->disconnect` is called immediately before the TCP connection is shutdown. - ->readwrite gets called during transfer to allow the protocol to do extra + `->readwrite` gets called during transfer to allow the protocol to do extra reads/writes - ->defport is the default report TCP or UDP port this protocol uses + `->defport` is the default report TCP or UDP port this protocol uses - ->protocol is one or more bits in the `CURLPROTO_*` set. The SSL versions + `->protocol` is one or more bits in the `CURLPROTO_*` set. The SSL versions have their "base" protocol set and then the SSL variation. Like "HTTP|HTTPS". - ->flags is a bitmask with additional information about the protocol that will + `->flags` is a bitmask with additional information about the protocol that will make it get treated differently by the generic engine: - `PROTOPT_SSL` - will make it connect and negotiate SSL @@ -1025,7 +1044,7 @@ for older and later versions as things don't change drastically that often. limit which "direction" of socket actions that the main engine will concern itself with. - - `PROTOPT_NONETWORK` - a protocol that doesn't use network (read file:) + - `PROTOPT_NONETWORK` - a protocol that doesn't use network (read `file:`) - `PROTOPT_NEEDSPWD` - this protocol needs a password and will use a default one unless one is provided @@ -1033,16 +1052,18 @@ for older and later versions as things don't change drastically that often. - `PROTOPT_NOURLQUERY` - this protocol can't handle a query part on the URL (?foo=bar) + ## conncache Is a hash table with connections for later re-use. Each `Curl_easy` has a pointer to its connection cache. Each multi handle sets up a connection cache that all added `Curl_easy`s share by default. 
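+
+  Seen from the application side, this cache is what normally makes the second
+  transfer below reuse the connection created for the first one, assuming the
+  server allows the connection to be kept alive (the URLs are placeholders and
+  error handling is left out):
+
+      #include <curl/curl.h>
+
+      int main(void)
+      {
+        CURL *curl = curl_easy_init();
+        if(curl) {
+          curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/one");
+          curl_easy_perform(curl);  /* sets up a connection and caches it */
+
+          curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/two");
+          curl_easy_perform(curl);  /* normally reuses the cached connection */
+
+          curl_easy_cleanup(curl);
+        }
+        return 0;
+      }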
+ ## Curl_share The libcurl share API allocates a `Curl_share` struct, exposed to the - external API as "CURLSH *". + external API as `CURLSH *`. The idea is that the struct can have a set of its own versions of caches and pools and then by providing this struct in the `CURLOPT_SHARE` option, those @@ -1055,10 +1076,11 @@ for older and later versions as things don't change drastically that often. The `Curl_share` struct can currently hold cookies, DNS cache and the SSL session cache. + ## CookieInfo This is the main cookie struct. It holds all known cookies and related - information. Each `Curl_easy` has its own private CookieInfo even when + information. Each `Curl_easy` has its own private `CookieInfo` even when they are added to a multi handle. They can be made to share cookies by using the share API. diff --git a/docs/KNOWN_BUGS b/docs/KNOWN_BUGS index 67c7b16611bb4f..1aa025fd5b9459 100644 --- a/docs/KNOWN_BUGS +++ b/docs/KNOWN_BUGS @@ -12,48 +12,52 @@ check the changelog of the current development status, as one or more of these problems may have been fixed or changed somewhat since this was written! 1. HTTP - 1.1 CURLFORM_CONTENTLEN in an array - 1.2 Disabling HTTP Pipelining + 1.2 Multiple methods in a single WWW-Authenticate: header 1.3 STARTTRANSFER time is wrong for HTTP POSTs 1.4 multipart formposts file name encoding 1.5 Expect-100 meets 417 1.6 Unnecessary close when 401 received waiting for 100 1.7 Deflate error after all content was received + 1.8 DoH isn't used for all name resolves when enabled 1.9 HTTP/2 frames while in the connection pool kill reuse - 1.10 Strips trailing dot from host name 1.11 CURLOPT_SEEKFUNCTION not called with CURLFORM_STREAM 2. TLS 2.1 CURLINFO_SSL_VERIFYRESULT has limited support 2.2 DER in keychain - 2.3 GnuTLS backend skips really long certificate fields - 2.4 DarwinSSL won't import PKCS#12 client certificates without a password + 2.3 Unable to use PKCS12 certificate with Secure Transport + 2.4 Secure Transport won't import PKCS#12 client certificates without a password 2.5 Client cert handling with Issuer DN differs between backends 2.6 CURL_GLOBAL_SSL 2.7 Client cert (MTLS) issues with Schannel + 2.8 Schannel disable CURLOPT_SSL_VERIFYPEER and verify hostname + 2.9 TLS session cache doesn't work with TFO + 2.10 Store TLS context per transfer instead of per connection + 2.11 Schannel TLS 1.2 handshake bug in old Windows versions 3. Email protocols 3.1 IMAP SEARCH ALL truncated response 3.2 No disconnect command - 3.3 SMTP to multiple recipients - 3.4 POP3 expects "CRLF.CRLF" eob for some single-line responses + 3.3 POP3 expects "CRLF.CRLF" eob for some single-line responses + 3.4 AUTH PLAIN for SMTP is not working on all servers 4. Command line 4.1 -J and -O with %-encoded file names 4.2 -J with -C - fails 4.3 --retry and transfer timeouts - 4.4 --upload-file . hang if delay in STDIN - 4.5 Improve --data-urlencode space encoding + 4.4 Improve --data-urlencode space encoding 5. 
Build and portability issues + 5.1 OS400 port requires deprecated IBM library 5.2 curl-config --libs contains private details 5.3 curl compiled on OSX 10.13 failed to run on OSX 10.10 - 5.4 Cannot compile against a static build of OpenLDAP - 5.5 can't handle Unicode arguments in Windows + 5.4 Build with statically built dependency + 5.5 can't handle Unicode arguments in non-Unicode builds on Windows 5.6 cmake support gaps 5.7 Visual Studio project gaps 5.8 configure finding libs in wrong directory 5.9 Utilize Requires.private directives in libcurl.pc + 5.11 configure --with-gssapi with Heimdal is ignored on macOS 6. Authentication 6.1 NTLM authentication and unicode @@ -62,6 +66,8 @@ problems may have been fixed or changed somewhat since this was written! 6.4 Negotiate and Kerberos V5 need a fake user name 6.5 NTLM doesn't support password with § character 6.6 libcurl can fail to try alternatives with --proxy-any + 6.7 Don't clear digest for single realm + 6.8 RTSP authentication breaks without redirect support 7. FTP 7.1 FTP without or slow 220 response @@ -73,7 +79,7 @@ problems may have been fixed or changed somewhat since this was written! 7.7 FTP and empty path parts in the URL 7.8 Premature transfer end but healthy control channel 7.9 Passive transfer tries only one IP address - 7.10 Stick to same family over SOCKS proxy + 7.10 FTPS needs session reuse 8. TELNET 8.1 TELNET and time limitations don't work @@ -81,10 +87,9 @@ problems may have been fixed or changed somewhat since this was written! 9. SFTP and SCP 9.1 SFTP doesn't do CURLOPT_POSTQUOTE correct + 9.2 wolfssh: publickey auth doesn't work 10. SOCKS - 10.1 SOCKS proxy connections are done blocking - 10.2 SOCKS don't support timeouts 10.3 FTPS over SOCKS 10.4 active FTP over a SOCKS @@ -95,9 +100,17 @@ problems may have been fixed or changed somewhat since this was written! 11.4 HTTP test server 'connection-monitor' problems 11.5 Connection information when using TCP Fast Open 11.6 slow connect to localhost on Windows + 11.7 signal-based resolver timeouts + 11.8 DoH leaks memory after followlocation + 11.9 DoH doesn't inherit all transfer options + 11.10 Blocking socket operations in non-blocking API + 11.11 A shared connection cache is not thread-safe + 11.12 'no_proxy' string-matches IPv6 numerical addreses 12. LDAP and OpenLDAP 12.1 OpenLDAP hangs after returning results + 12.2 LDAP on Windows does authentication wrong? + 12.3 LDAP on Windows doesn't work 13. TCP/IP 13.1 --interface for ipv6 binds to unusable IP address @@ -109,22 +122,12 @@ problems may have been fixed or changed somewhat since this was written! 1. HTTP -1.1 CURLFORM_CONTENTLEN in an array +1.2 Multiple methods in a single WWW-Authenticate: header - It is not possible to pass a 64-bit value using CURLFORM_CONTENTLEN with - CURLFORM_ARRAY, when compiled on 32-bit platforms that support 64-bit - integers. This is because the underlying structure 'curl_forms' uses a dual - purpose char* for storing these values in via casting. For more information - see the now closed related issue: - https://github.com/curl/curl/issues/608 - -1.2 Disabling HTTP Pipelining - - Disabling HTTP Pipelining when there are ongoing transfers can lead to - heap corruption and crash. 
https://curl.haxx.se/bug/view.cgi?id=1411 - - Similarly, removing a handle when pipelining corrupts data: - https://github.com/curl/curl/issues/2101 + The HTTP responses headers WWW-Authenticate: can provide information about + multiple authentication methods as multiple headers or as several methods + within a single header. The latter way, several methods in the same physical + line, is not supported by libcurl's parser. (For no good reason.) 1.3 STARTTRANSFER time is wrong for HTTP POSTs @@ -154,7 +157,7 @@ problems may have been fixed or changed somewhat since this was written! 1.6 Unnecessary close when 401 received waiting for 100 libcurl closes the connection if an HTTP 401 reply is received while it is - waiting for the the 100-continue response. + waiting for the 100-continue response. https://curl.haxx.se/mail/lib-2008-08/0462.html 1.7 Deflate error after all content was received @@ -166,6 +169,16 @@ problems may have been fixed or changed somewhat since this was written! See https://github.com/curl/curl/issues/2719 +1.8 DoH isn't used for all name resolves when enabled + + Even if DoH is specified to be used, there are some name resolves that are + done without it. This should be fixed. When the internal function + `Curl_resolver_wait_resolv()` is called, it doesn't use DoH to complete the + resolve as it otherwise should. + + See https://github.com/curl/curl/pull/3857 and + https://github.com/curl/curl/pull/3850 + 1.9 HTTP/2 frames while in the connection pool kill reuse If the server sends HTTP/2 frames (like for example an HTTP/2 PING frame) to @@ -176,42 +189,6 @@ problems may have been fixed or changed somewhat since this was written! This is *best* fixed by adding monitoring to connections while they are kept in the pool so that pings can be responded to appropriately. -1.10 Strips trailing dot from host name - - When given a URL with a trailing dot for the host name part: - "https://example.com./", libcurl will strip off the dot and use the name - without a dot internally and send it dot-less in HTTP Host: headers and in - the TLS SNI field. For the purpose of resolving the name to an address - the hostname is used as is without any change. - - The HTTP part violates RFC 7230 section 5.4 but the SNI part is accordance - with RFC 6066 section 3. - - URLs using these trailing dots are very rare in the wild and we have not seen - or gotten any real-world problems with such URLs reported. The popular - browsers seem to have stayed with not stripping the dot for both uses (thus - they violate RFC 6066 instead of RFC 7230). - - Daniel took the discussion to the HTTPbis mailing list in March 2016: - https://lists.w3.org/Archives/Public/ietf-http-wg/2016JanMar/0430.html but - there was not major rush or interest to fix this. The impression I get is - that most HTTP people rather not rock the boat now and instead prioritize web - compatibility rather than to strictly adhere to these RFCs. - - Our current approach allows a knowing client to send a custom HTTP header - with the dot added. - - In a few cases there is a difference in name resolving to IP addresses with - a trailing dot, but it can be noted that many HTTP servers will not happily - accept the trailing dot there unless that has been specifically configured - to be a fine virtual host. - - If URLs with trailing dots for host names become more popular or even just - used more than for just plain fun experiments, I'm sure we will have reason - to go back and reconsider. 
- - See https://github.com/curl/curl/issues/716 for the discussion. - 1.11 CURLOPT_SEEKFUNCTION not called with CURLFORM_STREAM I'm using libcurl to POST form data using a FILE* with the CURLFORM_STREAM @@ -228,21 +205,19 @@ problems may have been fixed or changed somewhat since this was written! 2.1 CURLINFO_SSL_VERIFYRESULT has limited support - CURLINFO_SSL_VERIFYRESULT is only implemented for the OpenSSL and NSS - backends, so relying on this information in a generic app is flaky. + CURLINFO_SSL_VERIFYRESULT is only implemented for the OpenSSL, NSS and + GnuTLS backends, so relying on this information in a generic app is flaky. 2.2 DER in keychain Curl doesn't recognize certificates in DER format in keychain, but it works with PEM. https://curl.haxx.se/bug/view.cgi?id=1065 -2.3 GnuTLS backend skips really long certificate fields +2.3 Unable to use PKCS12 certificate with Secure Transport - libcurl calls gnutls_x509_crt_get_dn() with a fixed buffer size and if the - field is too long in the cert, it'll just return an error and the field will - be displayed blank. + See https://github.com/curl/curl/issues/5403 -2.4 DarwinSSL won't import PKCS#12 client certificates without a password +2.4 Secure Transport won't import PKCS#12 client certificates without a password libcurl calls SecPKCS12Import with the PKCS#12 client certificate, but that function rejects certificates that do not have a password. @@ -288,6 +263,33 @@ problems may have been fixed or changed somewhat since this was written! See https://github.com/curl/curl/issues/3145 +2.8 Schannel disable CURLOPT_SSL_VERIFYPEER and verify hostname + + This seems to be a limitation in the underlying Schannel API. + + https://github.com/curl/curl/issues/3284 + +2.9 TLS session cache doesn't work with TFO + + See https://github.com/curl/curl/issues/4301 + +2.10 Store TLS context per transfer instead of per connection + + The GnuTLS `backend->cred` and the OpenSSL `backend->ctx` data and their + proxy versions (and possibly other TLS backends), could be better moved to be + stored in the Curl_easy handle instead of in per connection so that a single + transfer that makes multiple connections can reuse the context and reduce + memory consumption. + + https://github.com/curl/curl/issues/5102 + +2.11 Schannel TLS 1.2 handshake bug in old Windows versions + + In old versions of Windows such as 7 and 8.1 the Schannel TLS 1.2 handshake + implementation likely has a bug that can rarely cause the key exchange to + fail, resulting in error SEC_E_BUFFER_TOO_SMALL or SEC_E_MESSAGE_ALTERED. + + https://github.com/curl/curl/issues/5488 3. Email protocols @@ -303,20 +305,18 @@ problems may have been fixed or changed somewhat since this was written! The disconnect commands (LOGOUT and QUIT) may not be sent by IMAP, POP3 and SMTP if a failure occurs during the authentication phase of a connection. -3.3 SMTP to multiple recipients - - When sending data to multiple recipients, curl will abort and return failure - if one of the recipients indicate failure (on the "RCPT TO" - command). Ordinary mail programs would proceed and still send to the ones - that can receive data. This is subject for change in the future. - https://curl.haxx.se/bug/view.cgi?id=1116 - -3.4 POP3 expects "CRLF.CRLF" eob for some single-line responses +3.3 POP3 expects "CRLF.CRLF" eob for some single-line responses You have to tell libcurl not to expect a body, when dealing with one line response commands. 
Please see the POP3 examples and test cases which show this for the NOOP and DELE commands. https://curl.haxx.se/bug/?i=740 +3.4 AUTH PLAIN for SMTP is not working on all servers + + Specifying "--login-options AUTH=PLAIN" on the command line doesn't seem to + work correctly. + + See https://github.com/curl/curl/issues/4080 4. Command line @@ -354,15 +354,7 @@ problems may have been fixed or changed somewhat since this was written! https://curl.haxx.se/mail/lib-2008-01/0080.html and Mandriva bug report https://qa.mandriva.com/show_bug.cgi?id=22565 -4.4 --upload-file . hangs if delay in STDIN - - "(echo start; sleep 1; echo end) | curl --upload-file . http://mywebsite -vv" - - ... causes a hang when it shouldn't. - - See https://github.com/curl/curl/issues/2051 - -4.5 Improve --data-urlencode space encoding +4.4 Improve --data-urlencode space encoding ASCII space characters in --data-urlencode are currently encoded as %20 rather than +, which RFC 1866 says should be used. @@ -371,6 +363,14 @@ problems may have been fixed or changed somewhat since this was written! 5. Build and portability issues +5.1 OS400 port requires deprecated IBM library + + curl for OS400 requires QADRT to build, which provides ASCII wrappers for + libc/POSIX functions in the ILE, but IBM no longer supports or even offers + this library to download. + + See https://github.com/curl/curl/issues/5176 + 5.2 curl-config --libs contains private details "curl-config --libs" will include details set in LDFLAGS when configure is @@ -381,20 +381,40 @@ problems may have been fixed or changed somewhat since this was written! See https://github.com/curl/curl/issues/2905 -5.4 Cannot compile against a static build of OpenLDAP +5.4 Build with statically built dependency - See https://github.com/curl/curl/issues/2367 + The build scripts in curl (autotools, cmake and others) are primarily done to + work with shared/dynamic third party dependencies. When linking with shared + libraries, the dependency "chain" is handled automatically by the library + loader - on all modern systems. -5.5 can't handle Unicode arguments in Windows + If you instead link with a static library, we need to provide all the + dependency libraries already at the link command line. + + Figuring out all the dependency libraries for a given library is hard, as it + might also involve figuring out the dependencies of the dependencies and they + may vary between platforms and even change between versions. + + When using static dependencies, the build scripts will mostly assume that + you, the user, will provide all the necessary additional dependency libraries + as additional arguments in the build. With configure, by setting LIBS/LDFLAGS + on the command line. + + We welcome help to improve curl's ability to link with static libraries, but + it is likely a task that we can never fully support. + +5.5 can't handle Unicode arguments in non-Unicode builds on Windows If a URL or filename can't be encoded using the user's current codepage then it can only be encoded properly in the Unicode character set. Windows uses UTF-16 encoding for Unicode and stores it in wide characters, however curl - and libcurl are not equipped for that at the moment. And, except for Cygwin, - Windows can't use UTF-8 as a locale. + and libcurl are not equipped for that at the moment except when built with + _UNICODE and UNICODE defined. And, except for Cygwin, Windows can't use UTF-8 + as a locale. 
https://curl.haxx.se/bug/?i=345 https://curl.haxx.se/bug/?i=731 + https://curl.haxx.se/bug/?i=3747 5.6 cmake support gaps @@ -447,17 +467,23 @@ problems may have been fixed or changed somewhat since this was written! https://github.com/curl/curl/issues/864 +5.11 configure --with-gssapi with Heimdal is ignored on macOS + + ... unless you also pass --with-gssapi-libs + + https://github.com/curl/curl/issues/3841 + 6. Authentication 6.1 NTLM authentication and unicode NTLM authentication involving unicode user name or password only works - properly if built with UNICODE defined together with the WinSSL/Schannel + properly if built with UNICODE defined together with the Schannel backend. The original problem was mentioned in: https://curl.haxx.se/mail/lib-2009-10/0024.html https://curl.haxx.se/bug/view.cgi?id=896 - The WinSSL/Schannel version verified to work as mentioned in + The Schannel version verified to work as mentioned in https://curl.haxx.se/mail/lib-2012-07/0073.html 6.2 MIT Kerberos for Windows build @@ -499,6 +525,19 @@ problems may have been fixed or changed somewhat since this was written! https://github.com/curl/curl/issues/876 +6.7 Don't clear digest for single realm + + https://github.com/curl/curl/issues/3267 + +6.8 RTSP authentication breaks without redirect support + + RTSP authentication broke in 7.66.0. A work-around is to enable RTSP in + CURLOPT_REDIR_PROTOCOLS. Authentication should however not be considered an + actual redirect so a "proper" fix needs to be different and not require users + to allow redirects to RTSP to work. + + See https://github.com/curl/curl/pull/4750 + 7. FTP 7.1 FTP without or slow 220 response @@ -588,15 +627,13 @@ problems may have been fixed or changed somewhat since this was written! See https://github.com/curl/curl/issues/1508 -7.10 Stick to same family over SOCKS proxy +7.10 FTPS needs session reuse - When asked to do FTP over a SOCKS proxy, it might connect to the proxy (and - then subsequently to the remote server) using for example IPv4. When doing - the second connection, curl should make sure that the second connection is - using the same IP protocol version as the first connection did and not try - others, since the remote server will only accept the same. + When the control connection is reused for a subsequent transfer, some FTPS + servers complain about "missing session reuse" for the data channel for the + second transfer. - See https://curl.haxx.se/mail/archive-2018-07/0000.html + https://github.com/curl/curl/issues/4654 8. TELNET @@ -622,23 +659,15 @@ problems may have been fixed or changed somewhat since this was written! report but it cannot be accepted as-is. See https://curl.haxx.se/bug/view.cgi?id=748 +9.2 wolfssh: publickey auth doesn't work -10. SOCKS - -10.1 SOCKS proxy connections are done blocking + When building curl to use the wolfSSH backend for SFTP, the publickey + authentication doesn't work. This is simply functionality not written for curl + yet, the necessary API for make this work is provided by wolfSSH. - Both SOCKS5 and SOCKS4 proxy connections are done blocking, which is very bad - when used with the multi interface. + See https://github.com/curl/curl/issues/4820 -10.2 SOCKS don't support timeouts - - The SOCKS4 connection codes don't properly acknowledge (connect) timeouts. 
- According to bug #1556528, even the SOCKS5 connect code does not do it right: - https://curl.haxx.se/bug/view.cgi?id=604 - - When connecting to a SOCK proxy, the (connect) timeout is not properly - acknowledged after the actual TCP connect (during the SOCKS "negotiate" - phase). +10. SOCKS 10.3 FTPS over SOCKS @@ -690,7 +719,8 @@ problems may have been fixed or changed somewhat since this was written! CURLINFO_LOCAL_PORT (and possibly a few other) fails when TCP Fast Open is enabled. - See https://github.com/curl/curl/issues/1332 + See https://github.com/curl/curl/issues/1332 and + https://github.com/curl/curl/issues/4296 11.6 slow connect to localhost on Windows @@ -706,6 +736,45 @@ problems may have been fixed or changed somewhat since this was written! https://github.com/curl/curl/issues/2281 +11.7 signal-based resolver timeouts + + libcurl built without an asynchronous resolver library uses alarm() to time + out DNS lookups. When a timeout occurs, this causes libcurl to jump from the + signal handler back into the library with a sigsetjmp, which effectively + causes libcurl to continue running within the signal handler. This is + non-portable and could cause problems on some platforms. A discussion on the + problem is available at https://curl.haxx.se/mail/lib-2008-09/0197.html + + Also, alarm() provides timeout resolution only to the nearest second. alarm + ought to be replaced by setitimer on systems that support it. + +11.8 DoH leaks memory after followlocation + + https://github.com/curl/curl/issues/4592 + +11.9 DoH doesn't inherit all transfer options + + https://github.com/curl/curl/issues/4578 + +11.10 Blocking socket operations in non-blocking API + + The list of blocking socket operations is in TODO section "More non-blocking". + +11.11 A shared connection cache is not thread-safe + + The share interface offers CURL_LOCK_DATA_CONNECT to have multiple easy + handle share a connection cache, but due to how connections are used they are + still not thread-safe when used shared. + + See https://github.com/curl/curl/issues/4915 and lib1541.c + +11.12 'no_proxy' string-matches IPv6 numerical addreses + + This has the downside that "::1" for example doesn't match "::0:1" even + though they are in fact the same address. + + See https://github.com/curl/curl/issues/5745 + 12. LDAP and OpenLDAP 12.1 OpenLDAP hangs after returning results @@ -727,6 +796,16 @@ problems may have been fixed or changed somewhat since this was written! See https://github.com/curl/curl/issues/622 and https://curl.haxx.se/mail/lib-2016-01/0101.html +12.2 LDAP on Windows does authentication wrong? + + https://github.com/curl/curl/issues/3116 + +12.3 LDAP on Windows doesn't work + + A simple curl command line getting "ldap://ldap.forumsys.com" returns an + error that says "no memory" ! + + https://github.com/curl/curl/issues/4261 13. TCP/IP diff --git a/docs/LICENSE-MIXING.md b/docs/LICENSE-MIXING.md deleted file mode 100644 index e4f6759e40d341..00000000000000 --- a/docs/LICENSE-MIXING.md +++ /dev/null @@ -1,123 +0,0 @@ -License Mixing -============== - -libcurl can be built to use a fair amount of various third party libraries, -libraries that are written and provided by other parties that are distributed -using their own licenses. Even libcurl itself contains code that may cause -problems to some. This document attempts to describe what licenses libcurl and -the other libraries use and what possible dilemmas linking and mixing them all -can lead to for end users. 
- -I am not a lawyer and this is not legal advice! - -One common dilemma is that [GPL](https://www.gnu.org/licenses/gpl.html) -licensed code is not allowed to be linked with code licensed under the -[Original BSD license](https://spdx.org/licenses/BSD-4-Clause.html) (with the -announcement clause). You may still build your own copies that use them all, -but distributing them as binaries would be to violate the GPL license - unless -you accompany your license with an -[exception](https://www.gnu.org/licenses/gpl-faq.html#GPLIncompatibleLibs). This -particular problem was addressed when the [Modified BSD -license](https://opensource.org/licenses/BSD-3-Clause) was created, which does -not have the announcement clause that collides with GPL. - -## libcurl - - Uses an [MIT style license](https://curl.haxx.se/docs/copyright.html) that is - very liberal. - -## OpenSSL - - (May be used for SSL/TLS support) Uses an Original BSD-style license with an - announcement clause that makes it "incompatible" with GPL. You are not - allowed to ship binaries that link with OpenSSL that includes GPL code - (unless that specific GPL code includes an exception for OpenSSL - a habit - that is growing more and more common). If OpenSSL's licensing is a problem - for you, consider using another TLS library. - -## GnuTLS - - (May be used for SSL/TLS support) Uses the - [LGPL](https://www.gnu.org/licenses/lgpl.html) license. If this is a problem - for you, consider using another TLS library. Also note that GnuTLS itself - depends on and uses other libs (libgcrypt and libgpg-error) and they too are - LGPL- or GPL-licensed. - -## WolfSSL - - (May be used for SSL/TLS support) Uses the GPL license or a proprietary - license. If this is a problem for you, consider using another TLS library. - -## NSS - - (May be used for SSL/TLS support) Is covered by the - [MPL](https://www.mozilla.org/MPL/) license, the GPL license and the LGPL - license. You may choose to license the code under MPL terms, GPL terms, or - LGPL terms. These licenses grant you different permissions and impose - different obligations. You should select the license that best meets your - needs. - -## mbedTLS - - (May be used for SSL/TLS support) Uses the [Apache 2.0 - license](https://opensource.org/licenses/Apache-2.0) or the GPL license. - You may choose to license the code under Apache 2.0 terms or GPL terms. - These licenses grant you different permissions and impose different - obligations. You should select the license that best meets your needs. - -## BoringSSL - - (May be used for SSL/TLS support) As an OpenSSL fork, it has the same - license as that. - -## libressl - - (May be used for SSL/TLS support) As an OpenSSL fork, it has the same - license as that. - -## c-ares - - (Used for asynchronous name resolves) Uses an MIT license that is very - liberal and imposes no restrictions on any other library or part you may link - with. - -## zlib - - (Used for compressed Transfer-Encoding support) Uses an MIT-style license - that shouldn't collide with any other library. - -## MIT Kerberos - - (May be used for GSS support) MIT licensed, that shouldn't collide with any - other parts. - -## Heimdal - - (May be used for GSS support) Heimdal is Original BSD licensed with the - announcement clause. - -## GNU GSS - - (May be used for GSS support) GNU GSS is GPL licensed. Note that you may not - distribute binary curl packages that uses this if you build curl to also link - and use any Original BSD licensed libraries! 
- -## libidn - - (Used for IDNA support) Uses the GNU Lesser General Public License [3]. LGPL - is a variation of GPL with slightly less aggressive "copyleft". This license - requires more requirements to be met when distributing binaries, see the - license for details. Also note that if you distribute a binary that includes - this library, you must also include the full LGPL license text. Please - properly point out what parts of the distributed package that the license - addresses. - -## OpenLDAP - - (Used for LDAP support) Uses a Modified BSD-style license. Since libcurl uses - OpenLDAP as a shared library only, I have not heard of anyone that ships - OpenLDAP linked with libcurl in an app. - -## libssh2 - - (Used for scp and sftp support) libssh2 uses a Modified BSD-style license. diff --git a/docs/MAIL-ETIQUETTE b/docs/MAIL-ETIQUETTE index 07660a001d433b..9d210a655f2fd3 100644 --- a/docs/MAIL-ETIQUETTE +++ b/docs/MAIL-ETIQUETTE @@ -170,7 +170,7 @@ MAIL ETIQUETTE send your email to. Your email as sent to a curl mailing list will end up in mail archives, on - the curl web site and elsewhere, for others to see and read. Today and in + the curl website and elsewhere, for others to see and read. Today and in the future. In addition to the archives, the mail is sent out to thousands of individuals. There is no way to undo a sent email. diff --git a/docs/MANUAL b/docs/MANUAL deleted file mode 100644 index 59b97427c7be9a..00000000000000 --- a/docs/MANUAL +++ /dev/null @@ -1,1058 +0,0 @@ -LATEST VERSION - - You always find news about what's going on as well as the latest versions - from the curl web pages, located at: - - https://curl.haxx.se - -SIMPLE USAGE - - Get the main page from Netscape's web-server: - - curl http://www.netscape.com/ - - Get the README file the user's home directory at funet's ftp-server: - - curl ftp://ftp.funet.fi/README - - Get a web page from a server using port 8000: - - curl http://www.weirdserver.com:8000/ - - Get a directory listing of an FTP site: - - curl ftp://cool.haxx.se/ - - Get the definition of curl from a dictionary: - - curl dict://dict.org/m:curl - - Fetch two documents at once: - - curl ftp://cool.haxx.se/ http://www.weirdserver.com:8000/ - - Get a file off an FTPS server: - - curl ftps://files.are.secure.com/secrets.txt - - or use the more appropriate FTPS way to get the same file: - - curl --ftp-ssl ftp://files.are.secure.com/secrets.txt - - Get a file from an SSH server using SFTP: - - curl -u username sftp://example.com/etc/issue - - Get a file from an SSH server using SCP using a private key - (not password-protected) to authenticate: - - curl -u username: --key ~/.ssh/id_rsa \ - scp://example.com/~/file.txt - - Get a file from an SSH server using SCP using a private key - (password-protected) to authenticate: - - curl -u username: --key ~/.ssh/id_rsa --pass private_key_password \ - scp://example.com/~/file.txt - - Get the main page from an IPv6 web server: - - curl "http://[2001:1890:1112:1::20]/" - - Get a file from an SMB server: - - curl -u "domain\username:passwd" smb://server.example.com/share/file.txt - -DOWNLOAD TO A FILE - - Get a web page and store in a local file with a specific name: - - curl -o thatpage.html http://www.netscape.com/ - - Get a web page and store in a local file, make the local file get the name - of the remote document (if no file name part is specified in the URL, this - will fail): - - curl -O http://www.netscape.com/index.html - - Fetch two files and store them with their remote names: - - curl -O 
www.haxx.se/index.html -O curl.haxx.se/download.html - -USING PASSWORDS - - FTP - - To ftp files using name+passwd, include them in the URL like: - - curl ftp://name:passwd@machine.domain:port/full/path/to/file - - or specify them with the -u flag like - - curl -u name:passwd ftp://machine.domain:port/full/path/to/file - - FTPS - - It is just like for FTP, but you may also want to specify and use - SSL-specific options for certificates etc. - - Note that using FTPS:// as prefix is the "implicit" way as described in the - standards while the recommended "explicit" way is done by using FTP:// and - the --ftp-ssl option. - - SFTP / SCP - - This is similar to FTP, but you can use the --key option to specify a - private key to use instead of a password. Note that the private key may - itself be protected by a password that is unrelated to the login password - of the remote system; this password is specified using the --pass option. - Typically, curl will automatically extract the public key from the private - key file, but in cases where curl does not have the proper library support, - a matching public key file must be specified using the --pubkey option. - - HTTP - - Curl also supports user and password in HTTP URLs, thus you can pick a file - like: - - curl http://name:passwd@machine.domain/full/path/to/file - - or specify user and password separately like in - - curl -u name:passwd http://machine.domain/full/path/to/file - - HTTP offers many different methods of authentication and curl supports - several: Basic, Digest, NTLM and Negotiate (SPNEGO). Without telling which - method to use, curl defaults to Basic. You can also ask curl to pick the - most secure ones out of the ones that the server accepts for the given URL, - by using --anyauth. - - NOTE! According to the URL specification, HTTP URLs can not contain a user - and password, so that style will not work when using curl via a proxy, even - though curl allows it at other times. When using a proxy, you _must_ use - the -u style for user and password. - - HTTPS - - Probably most commonly used with private certificates, as explained below. - -PROXY - - curl supports both HTTP and SOCKS proxy servers, with optional authentication. - It does not have special support for FTP proxy servers since there are no - standards for those, but it can still be made to work with many of them. You - can also use both HTTP and SOCKS proxies to transfer files to and from FTP - servers. - - Get an ftp file using an HTTP proxy named my-proxy that uses port 888: - - curl -x my-proxy:888 ftp://ftp.leachsite.com/README - - Get a file from an HTTP server that requires user and password, using the - same proxy as above: - - curl -u user:passwd -x my-proxy:888 http://www.get.this/ - - Some proxies require special authentication. Specify by using -U as above: - - curl -U user:passwd -x my-proxy:888 http://www.get.this/ - - A comma-separated list of hosts and domains which do not use the proxy can - be specified as: - - curl --noproxy localhost,get.this -x my-proxy:888 http://www.get.this/ - - If the proxy is specified with --proxy1.0 instead of --proxy or -x, then - curl will use HTTP/1.0 instead of HTTP/1.1 for any CONNECT attempts. - - curl also supports SOCKS4 and SOCKS5 proxies with --socks4 and --socks5. - - See also the environment variables Curl supports that offer further proxy - control. - - Most FTP proxy servers are set up to appear as a normal FTP server from the - client's perspective, with special commands to select the remote FTP server. 
- curl supports the -u, -Q and --ftp-account options that can be used to - set up transfers through many FTP proxies. For example, a file can be - uploaded to a remote FTP server using a Blue Coat FTP proxy with the - options: - - curl -u "Remote-FTP-Username@remote.ftp.server Proxy-Username:Remote-Pass" \ - --ftp-account Proxy-Password --upload-file local-file \ - ftp://my-ftp.proxy.server:21/remote/upload/path/ - - See the manual for your FTP proxy to determine the form it expects to set up - transfers, and curl's -v option to see exactly what curl is sending. - -RANGES - - HTTP 1.1 introduced byte-ranges. Using this, a client can request - to get only one or more subparts of a specified document. Curl supports - this with the -r flag. - - Get the first 100 bytes of a document: - - curl -r 0-99 http://www.get.this/ - - Get the last 500 bytes of a document: - - curl -r -500 http://www.get.this/ - - Curl also supports simple ranges for FTP files as well. Then you can only - specify start and stop position. - - Get the first 100 bytes of a document using FTP: - - curl -r 0-99 ftp://www.get.this/README - -UPLOADING - - FTP / FTPS / SFTP / SCP - - Upload all data on stdin to a specified server: - - curl -T - ftp://ftp.upload.com/myfile - - Upload data from a specified file, login with user and password: - - curl -T uploadfile -u user:passwd ftp://ftp.upload.com/myfile - - Upload a local file to the remote site, and use the local file name at the remote - site too: - - curl -T uploadfile -u user:passwd ftp://ftp.upload.com/ - - Upload a local file to get appended to the remote file: - - curl -T localfile -a ftp://ftp.upload.com/remotefile - - Curl also supports ftp upload through a proxy, but only if the proxy is - configured to allow that kind of tunneling. If it does, you can run curl in - a fashion similar to: - - curl --proxytunnel -x proxy:port -T localfile ftp.upload.com - -SMB / SMBS - - curl -T file.txt -u "domain\username:passwd" \ - smb://server.example.com/share/ - - HTTP - - Upload all data on stdin to a specified HTTP site: - - curl -T - http://www.upload.com/myfile - - Note that the HTTP server must have been configured to accept PUT before - this can be done successfully. - - For other ways to do HTTP data upload, see the POST section below. - -VERBOSE / DEBUG - - If curl fails where it isn't supposed to, if the servers don't let you in, - if you can't understand the responses: use the -v flag to get verbose - fetching. Curl will output lots of info and what it sends and receives in - order to let the user see all client-server interaction (but it won't show - you the actual data). - - curl -v ftp://ftp.upload.com/ - - To get even more details and information on what curl does, try using the - --trace or --trace-ascii options with a given file name to log to, like - this: - - curl --trace trace.txt www.haxx.se - - -DETAILED INFORMATION - - Different protocols provide different ways of getting detailed information - about specific files/documents. To get curl to show detailed information - about a single file, you should use -I/--head option. It displays all - available info on a single file for HTTP and FTP. The HTTP information is a - lot more extensive. - - For HTTP, you can get the header information (the same as -I would show) - shown before the data by using -i/--include. Curl understands the - -D/--dump-header option when getting files from both FTP and HTTP, and it - will then store the headers in the specified file. 
- - Store the HTTP headers in a separate file (headers.txt in the example): - - curl --dump-header headers.txt curl.haxx.se - - Note that headers stored in a separate file can be very useful at a later - time if you want curl to use cookies sent by the server. More about that in - the cookies section. - -POST (HTTP) - - It's easy to post data using curl. This is done using the -d - option. The post data must be urlencoded. - - Post a simple "name" and "phone" guestbook. - - curl -d "name=Rafael%20Sagula&phone=3320780" \ - http://www.where.com/guest.cgi - - How to post a form with curl, lesson #1: - - Dig out all the tags in the form that you want to fill in. - - If there's a "normal" post, you use -d to post. -d takes a full "post - string", which is in the format - - =&=&... - - The 'variable' names are the names set with "name=" in the tags, and - the data is the contents you want to fill in for the inputs. The data *must* - be properly URL encoded. That means you replace space with + and that you - replace weird letters with %XX where XX is the hexadecimal representation of - the letter's ASCII code. - - Example: - - (page located at http://www.formpost.com/getthis/ - -
- <form action="post.cgi" method="post">
-  <input name=user size=10>
-  <input name=pass type=password size=10>
-  <input name=id type=hidden value="blablabla">
-  <input name=ding value="submit">
- </form>
- - We want to enter user 'foobar' with password '12345'. - - To post to this, you enter a curl command line like: - - curl -d "user=foobar&pass=12345&id=blablabla&ding=submit" \ - http://www.formpost.com/getthis/post.cgi - - - While -d uses the application/x-www-form-urlencoded mime-type, generally - understood by CGI's and similar, curl also supports the more capable - multipart/form-data type. This latter type supports things like file upload. - - -F accepts parameters like -F "name=contents". If you want the contents to - be read from a file, use <@filename> as contents. When specifying a file, - you can also specify the file content type by appending ';type=' - to the file name. You can also post the contents of several files in one - field. For example, the field name 'coolfiles' is used to send three files, - with different content types using the following syntax: - - curl -F "coolfiles=@fil1.gif;type=image/gif,fil2.txt,fil3.html" \ - http://www.post.com/postit.cgi - - If the content-type is not specified, curl will try to guess from the file - extension (it only knows a few), or use the previously specified type (from - an earlier file if several files are specified in a list) or else it will - use the default type 'application/octet-stream'. - - Emulate a fill-in form with -F. Let's say you fill in three fields in a - form. One field is a file name which to post, one field is your name and one - field is a file description. We want to post the file we have written named - "cooltext.txt". To let curl do the posting of this data instead of your - favourite browser, you have to read the HTML source of the form page and - find the names of the input fields. In our example, the input field names - are 'file', 'yourname' and 'filedescription'. - - curl -F "file=@cooltext.txt" -F "yourname=Daniel" \ - -F "filedescription=Cool text file with cool text inside" \ - http://www.post.com/postit.cgi - - To send two files in one post you can do it in two ways: - - 1. Send multiple files in a single "field" with a single field name: - - curl -F "pictures=@dog.gif,cat.gif" - - 2. Send two fields with two field names: - - curl -F "docpicture=@dog.gif" -F "catpicture=@cat.gif" - - To send a field value literally without interpreting a leading '@' - or '<', or an embedded ';type=', use --form-string instead of - -F. This is recommended when the value is obtained from a user or - some other unpredictable source. Under these circumstances, using - -F instead of --form-string would allow a user to trick curl into - uploading a file. - -REFERRER - - An HTTP request has the option to include information about which address - referred it to the actual page. Curl allows you to specify the - referrer to be used on the command line. It is especially useful to - fool or trick stupid servers or CGI scripts that rely on that information - being available or contain certain data. - - curl -e www.coolsite.com http://www.showme.com/ - - NOTE: The Referer: [sic] field is defined in the HTTP spec to be a full URL. - -USER AGENT - - An HTTP request has the option to include information about the browser - that generated the request. Curl allows it to be specified on the command - line. It is especially useful to fool or trick stupid servers or CGI - scripts that only accept certain browsers. 
- - Example: - - curl -A 'Mozilla/3.0 (Win95; I)' http://www.nationsbank.com/ - - Other common strings: - 'Mozilla/3.0 (Win95; I)' Netscape Version 3 for Windows 95 - 'Mozilla/3.04 (Win95; U)' Netscape Version 3 for Windows 95 - 'Mozilla/2.02 (OS/2; U)' Netscape Version 2 for OS/2 - 'Mozilla/4.04 [en] (X11; U; AIX 4.2; Nav)' NS for AIX - 'Mozilla/4.05 [en] (X11; U; Linux 2.0.32 i586)' NS for Linux - - Note that Internet Explorer tries hard to be compatible in every way: - 'Mozilla/4.0 (compatible; MSIE 4.01; Windows 95)' MSIE for W95 - - Mozilla is not the only possible User-Agent name: - 'Konqueror/1.0' KDE File Manager desktop client - 'Lynx/2.7.1 libwww-FM/2.14' Lynx command line browser - -COOKIES - - Cookies are generally used by web servers to keep state information at the - client's side. The server sets cookies by sending a response line in the - headers that looks like 'Set-Cookie: ' where the data part then - typically contains a set of NAME=VALUE pairs (separated by semicolons ';' - like "NAME1=VALUE1; NAME2=VALUE2;"). The server can also specify for what - path the "cookie" should be used for (by specifying "path=value"), when the - cookie should expire ("expire=DATE"), for what domain to use it - ("domain=NAME") and if it should be used on secure connections only - ("secure"). - - If you've received a page from a server that contains a header like: - Set-Cookie: sessionid=boo123; path="/foo"; - - it means the server wants that first pair passed on when we get anything in - a path beginning with "/foo". - - Example, get a page that wants my name passed in a cookie: - - curl -b "name=Daniel" www.sillypage.com - - Curl also has the ability to use previously received cookies in following - sessions. If you get cookies from a server and store them in a file in a - manner similar to: - - curl --dump-header headers www.example.com - - ... you can then in a second connect to that (or another) site, use the - cookies from the 'headers' file like: - - curl -b headers www.example.com - - While saving headers to a file is a working way to store cookies, it is - however error-prone and not the preferred way to do this. Instead, make curl - save the incoming cookies using the well-known netscape cookie format like - this: - - curl -c cookies.txt www.example.com - - Note that by specifying -b you enable the "cookie awareness" and with -L - you can make curl follow a location: (which often is used in combination - with cookies). So that if a site sends cookies and a location, you can - use a non-existing file to trigger the cookie awareness like: - - curl -L -b empty.txt www.example.com - - The file to read cookies from must be formatted using plain HTTP headers OR - as netscape's cookie file. Curl will determine what kind it is based on the - file contents. In the above command, curl will parse the header and store - the cookies received from www.example.com. curl will send to the server the - stored cookies which match the request as it follows the location. The - file "empty.txt" may be a nonexistent file. - - To read and write cookies from a netscape cookie file, you can set both -b - and -c to use the same file: - - curl -b cookies.txt -c cookies.txt www.example.com - -PROGRESS METER - - The progress meter exists to show a user that something actually is - happening. The different fields in the output have the following meaning: - - % Total % Received % Xferd Average Speed Time Curr. 
- Dload Upload Total Current Left Speed - 0 151M 0 38608 0 0 9406 0 4:41:43 0:00:04 4:41:39 9287 - - From left-to-right: - % - percentage completed of the whole transfer - Total - total size of the whole expected transfer - % - percentage completed of the download - Received - currently downloaded amount of bytes - % - percentage completed of the upload - Xferd - currently uploaded amount of bytes - Average Speed - Dload - the average transfer speed of the download - Average Speed - Upload - the average transfer speed of the upload - Time Total - expected time to complete the operation - Time Current - time passed since the invoke - Time Left - expected time left to completion - Curr.Speed - the average transfer speed the last 5 seconds (the first - 5 seconds of a transfer is based on less time of course.) - - The -# option will display a totally different progress bar that doesn't - need much explanation! - -SPEED LIMIT - - Curl allows the user to set the transfer speed conditions that must be met - to let the transfer keep going. By using the switch -y and -Y you - can make curl abort transfers if the transfer speed is below the specified - lowest limit for a specified time. - - To have curl abort the download if the speed is slower than 3000 bytes per - second for 1 minute, run: - - curl -Y 3000 -y 60 www.far-away-site.com - - This can very well be used in combination with the overall time limit, so - that the above operation must be completed in whole within 30 minutes: - - curl -m 1800 -Y 3000 -y 60 www.far-away-site.com - - Forcing curl not to transfer data faster than a given rate is also possible, - which might be useful if you're using a limited bandwidth connection and you - don't want your transfer to use all of it (sometimes referred to as - "bandwidth throttle"). - - Make curl transfer data no faster than 10 kilobytes per second: - - curl --limit-rate 10K www.far-away-site.com - - or - - curl --limit-rate 10240 www.far-away-site.com - - Or prevent curl from uploading data faster than 1 megabyte per second: - - curl -T upload --limit-rate 1M ftp://uploadshereplease.com - - When using the --limit-rate option, the transfer rate is regulated on a - per-second basis, which will cause the total transfer speed to become lower - than the given number. Sometimes of course substantially lower, if your - transfer stalls during periods. - -CONFIG FILE - - Curl automatically tries to read the .curlrc file (or _curlrc file on win32 - systems) from the user's home dir on startup. - - The config file could be made up with normal command line switches, but you - can also specify the long options without the dashes to make it more - readable. You can separate the options and the parameter with spaces, or - with = or :. Comments can be used within the file. If the first letter on a - line is a '#'-symbol the rest of the line is treated as a comment. - - If you want the parameter to contain spaces, you must enclose the entire - parameter within double quotes ("). Within those quotes, you specify a - quote as \". - - NOTE: You must specify options and their arguments on the same line. - - Example, set default time out and proxy in a config file: - - # We want a 30 minute timeout: - -m 1800 - # ... and we use a proxy for all accesses: - proxy = proxy.our.domain.com:8080 - - White spaces ARE significant at the end of lines, but all white spaces - leading up to the first characters of each line are ignored. 
- - Prevent curl from reading the default file by using -q as the first command - line parameter, like: - - curl -q www.thatsite.com - - Force curl to get and display a local help page in case it is invoked - without URL by making a config file similar to: - - # default url to get - url = "http://help.with.curl.com/curlhelp.html" - - You can specify another config file to be read by using the -K/--config - flag. If you set config file name to "-" it'll read the config from stdin, - which can be handy if you want to hide options from being visible in process - tables etc: - - echo "user = user:passwd" | curl -K - http://that.secret.site.com - -EXTRA HEADERS - - When using curl in your own very special programs, you may end up needing - to pass on your own custom headers when getting a web page. You can do - this by using the -H flag. - - Example, send the header "X-you-and-me: yes" to the server when getting a - page: - - curl -H "X-you-and-me: yes" www.love.com - - This can also be useful in case you want curl to send a different text in a - header than it normally does. The -H header you specify then replaces the - header curl would normally send. If you replace an internal header with an - empty one, you prevent that header from being sent. To prevent the Host: - header from being used: - - curl -H "Host:" www.server.com - -FTP and PATH NAMES - - Do note that when getting files with the ftp:// URL, the given path is - relative the directory you enter. To get the file 'README' from your home - directory at your ftp site, do: - - curl ftp://user:passwd@my.site.com/README - - But if you want the README file from the root directory of that very same - site, you need to specify the absolute file name: - - curl ftp://user:passwd@my.site.com//README - - (I.e with an extra slash in front of the file name.) - -SFTP and SCP and PATH NAMES - - With sftp: and scp: URLs, the path name given is the absolute name on the - server. To access a file relative to the remote user's home directory, - prefix the file with /~/ , such as: - - curl -u $USER sftp://home.example.com/~/.bashrc - -FTP and firewalls - - The FTP protocol requires one of the involved parties to open a second - connection as soon as data is about to get transferred. There are two ways to - do this. - - The default way for curl is to issue the PASV command which causes the - server to open another port and await another connection performed by the - client. This is good if the client is behind a firewall that doesn't allow - incoming connections. - - curl ftp.download.com - - If the server, for example, is behind a firewall that doesn't allow connections - on ports other than 21 (or if it just doesn't support the PASV command), the - other way to do it is to use the PORT command and instruct the server to - connect to the client on the given IP number and port (as parameters to the - PORT command). - - The -P flag to curl supports a few different options. Your machine may have - several IP-addresses and/or network interfaces and curl allows you to select - which of them to use. 
Default address can also be used: - - curl -P - ftp.download.com - - Download with PORT but use the IP address of our 'le0' interface (this does - not work on windows): - - curl -P le0 ftp.download.com - - Download with PORT but use 192.168.0.10 as our IP address to use: - - curl -P 192.168.0.10 ftp.download.com - -NETWORK INTERFACE - - Get a web page from a server using a specified port for the interface: - - curl --interface eth0:1 http://www.netscape.com/ - - or - - curl --interface 192.168.1.10 http://www.netscape.com/ - -HTTPS - - Secure HTTP requires SSL libraries to be installed and used when curl is - built. If that is done, curl is capable of retrieving and posting documents - using the HTTPS protocol. - - Example: - - curl https://www.secure-site.com - - Curl is also capable of using your personal certificates to get/post files - from sites that require valid certificates. The only drawback is that the - certificate needs to be in PEM-format. PEM is a standard and open format to - store certificates with, but it is not used by the most commonly used - browsers (Netscape and MSIE both use the so called PKCS#12 format). If you - want curl to use the certificates you use with your (favourite) browser, you - may need to download/compile a converter that can convert your browser's - formatted certificates to PEM formatted ones. This kind of converter is - included in recent versions of OpenSSL, and for older versions Dr Stephen - N. Henson has written a patch for SSLeay that adds this functionality. You - can get his patch (that requires an SSLeay installation) from his site at: - https://web.archive.org/web/20170715155512/www.drh-consultancy.demon.co.uk/ - - Example on how to automatically retrieve a document using a certificate with - a personal password: - - curl -E /path/to/cert.pem:password https://secure.site.com/ - - If you neglect to specify the password on the command line, you will be - prompted for the correct password before any data can be received. - - Many older SSL-servers have problems with SSLv3 or TLS, which newer versions - of OpenSSL etc use, therefore it is sometimes useful to specify what - SSL-version curl should use. Use -3, -2 or -1 to specify that exact SSL - version to use (for SSLv3, SSLv2 or TLSv1 respectively): - - curl -2 https://secure.site.com/ - - Otherwise, curl will first attempt to use v3 and then v2. - - To use OpenSSL to convert your favourite browser's certificate into a PEM - formatted one that curl can use, do something like this: - - In Netscape, you start with hitting the 'Security' menu button. - - Select 'certificates->yours' and then pick a certificate in the list - - Press the 'Export' button - - enter your PIN code for the certs - - select a proper place to save it - - Run the 'openssl' application to convert the certificate. If you cd to the - openssl installation, you can do it like: - - # ./apps/openssl pkcs12 -in [file you saved] -clcerts -out [PEMfile] - - In Firefox, select Options, then Advanced, then the Encryption tab, - View Certificates. This opens the Certificate Manager, where you can - Export. Be sure to select PEM for the Save as type. - - In Internet Explorer, select Internet Options, then the Content tab, then - Certificates. Then you can Export, and depending on the format you may - need to convert to PEM. - - In Chrome, select Settings, then Show Advanced Settings. Under HTTPS/SSL - select Manage Certificates. 
- -RESUMING FILE TRANSFERS - - To continue a file transfer where it was previously aborted, curl supports - resume on HTTP(S) downloads as well as FTP uploads and downloads. - - Continue downloading a document: - - curl -C - -o file ftp://ftp.server.com/path/file - - Continue uploading a document(*1): - - curl -C - -T file ftp://ftp.server.com/path/file - - Continue downloading a document from a web server(*2): - - curl -C - -o file http://www.server.com/ - - (*1) = This requires that the FTP server supports the non-standard command - SIZE. If it doesn't, curl will say so. - - (*2) = This requires that the web server supports at least HTTP/1.1. If it - doesn't, curl will say so. - -TIME CONDITIONS - - HTTP allows a client to specify a time condition for the document it - requests. It is If-Modified-Since or If-Unmodified-Since. Curl allows you to - specify them with the -z/--time-cond flag. - - For example, you can easily make a download that only gets performed if the - remote file is newer than a local copy. It would be made like: - - curl -z local.html http://remote.server.com/remote.html - - Or you can download a file only if the local file is newer than the remote - one. Do this by prepending the date string with a '-', as in: - - curl -z -local.html http://remote.server.com/remote.html - - You can specify a "free text" date as condition. Tell curl to only download - the file if it was updated since January 12, 2012: - - curl -z "Jan 12 2012" http://remote.server.com/remote.html - - Curl will then accept a wide range of date formats. You always make the date - check the other way around by prepending it with a dash '-'. - -DICT - - For fun try - - curl dict://dict.org/m:curl - curl dict://dict.org/d:heisenbug:jargon - curl dict://dict.org/d:daniel:web1913 - - Aliases for 'm' are 'match' and 'find', and aliases for 'd' are 'define' - and 'lookup'. For example, - - curl dict://dict.org/find:curl - - Commands that break the URL description of the RFC (but not the DICT - protocol) are - - curl dict://dict.org/show:db - curl dict://dict.org/show:strat - - Authentication is still missing (but this is not required by the RFC) - -LDAP - - If you have installed the OpenLDAP library, curl can take advantage of it - and offer ldap:// support. - On Windows, curl will use WinLDAP from Platform SDK by default. - - Default protocol version used by curl is LDAPv3. LDAPv2 will be used as - fallback mechanism in case if LDAPv3 will fail to connect. - - LDAP is a complex thing and writing an LDAP query is not an easy task. I do - advise you to dig up the syntax description for that elsewhere. One such - place might be: - - RFC 2255, "The LDAP URL Format" https://curl.haxx.se/rfc/rfc2255.txt - - To show you an example, this is how I can get all people from my local LDAP - server that has a certain sub-domain in their email address: - - curl -B "ldap://ldap.frontec.se/o=frontec??sub?mail=*sth.frontec.se" - - If I want the same info in HTML format, I can get it by not using the -B - (enforce ASCII) flag. - - You also can use authentication when accessing LDAP catalog: - - curl -u user:passwd "ldap://ldap.frontec.se/o=frontec??sub?mail=*" - curl "ldap://user:passwd@ldap.frontec.se/o=frontec??sub?mail=*" - - By default, if user and password provided, OpenLDAP/WinLDAP will use basic - authentication. 
On Windows you can control this behavior by providing - one of --basic, --ntlm or --digest option in curl command line - - curl --ntlm "ldap://user:passwd@ldap.frontec.se/o=frontec??sub?mail=*" - - On Windows, if no user/password specified, auto-negotiation mechanism will - be used with current logon credentials (SSPI/SPNEGO). - -ENVIRONMENT VARIABLES - - Curl reads and understands the following environment variables: - - http_proxy, HTTPS_PROXY, FTP_PROXY - - They should be set for protocol-specific proxies. General proxy should be - set with - - ALL_PROXY - - A comma-separated list of host names that shouldn't go through any proxy is - set in (only an asterisk, '*' matches all hosts) - - NO_PROXY - - If the host name matches one of these strings, or the host is within the - domain of one of these strings, transactions with that node will not be - proxied. When a domain is used, it needs to start with a period. A user can - specify that both www.example.com and foo.example.com should not use a - proxy by setting NO_PROXY to ".example.com". By including the full name you - can exclude specific host names, so to make www.example.com not use a proxy - but still have foo.example.com do it, set NO_PROXY to "www.example.com" - - The usage of the -x/--proxy flag overrides the environment variables. - -NETRC - - Unix introduced the .netrc concept a long time ago. It is a way for a user - to specify name and password for commonly visited FTP sites in a file so - that you don't have to type them in each time you visit those sites. You - realize this is a big security risk if someone else gets hold of your - passwords, so therefore most unix programs won't read this file unless it is - only readable by yourself (curl doesn't care though). - - Curl supports .netrc files if told to (using the -n/--netrc and - --netrc-optional options). This is not restricted to just FTP, - so curl can use it for all protocols where authentication is used. - - A very simple .netrc file could look something like: - - machine curl.haxx.se login iamdaniel password mysecret - -CUSTOM OUTPUT - - To better allow script programmers to get to know about the progress of - curl, the -w/--write-out option was introduced. Using this, you can specify - what information from the previous transfer you want to extract. - - To display the amount of bytes downloaded together with some text and an - ending newline: - - curl -w 'We downloaded %{size_download} bytes\n' www.download.com - -KERBEROS FTP TRANSFER - - Curl supports kerberos4 and kerberos5/GSSAPI for FTP transfers. You need - the kerberos package installed and used at curl build time for it to be - available. - - First, get the krb-ticket the normal way, like with the kinit/kauth tool. - Then use curl in way similar to: - - curl --krb private ftp://krb4site.com -u username:fakepwd - - There's no use for a password on the -u switch, but a blank one will make - curl ask for one and you already entered the real password to kinit/kauth. - -TELNET - - The curl telnet support is basic and very easy to use. Curl passes all data - passed to it on stdin to the remote server. Connect to a remote telnet - server using a command line similar to: - - curl telnet://remote.server.com - - And enter the data to pass to the server on stdin. The result will be sent - to stdout or to the file you specify with -o. - - You might want the -N/--no-buffer option to switch off the buffered output - for slow connections or similar. 
- - Pass options to the telnet protocol negotiation, by using the -t option. To - tell the server we use a vt100 terminal, try something like: - - curl -tTTYPE=vt100 telnet://remote.server.com - - Other interesting options for it -t include: - - - XDISPLOC= Sets the X display location. - - - NEW_ENV= Sets an environment variable. - - NOTE: The telnet protocol does not specify any way to login with a specified - user and password so curl can't do that automatically. To do that, you need - to track when the login prompt is received and send the username and - password accordingly. - -PERSISTENT CONNECTIONS - - Specifying multiple files on a single command line will make curl transfer - all of them, one after the other in the specified order. - - libcurl will attempt to use persistent connections for the transfers so that - the second transfer to the same host can use the same connection that was - already initiated and was left open in the previous transfer. This greatly - decreases connection time for all but the first transfer and it makes a far - better use of the network. - - Note that curl cannot use persistent connections for transfers that are used - in subsequence curl invokes. Try to stuff as many URLs as possible on the - same command line if they are using the same host, as that'll make the - transfers faster. If you use an HTTP proxy for file transfers, practically - all transfers will be persistent. - -MULTIPLE TRANSFERS WITH A SINGLE COMMAND LINE - - As is mentioned above, you can download multiple files with one command line - by simply adding more URLs. If you want those to get saved to a local file - instead of just printed to stdout, you need to add one save option for each - URL you specify. Note that this also goes for the -O option (but not - --remote-name-all). - - For example: get two files and use -O for the first and a custom file - name for the second: - - curl -O http://url.com/file.txt ftp://ftp.com/moo.exe -o moo.jpg - - You can also upload multiple files in a similar fashion: - - curl -T local1 ftp://ftp.com/moo.exe -T local2 ftp://ftp.com/moo2.txt - -IPv6 - - curl will connect to a server with IPv6 when a host lookup returns an IPv6 - address and fall back to IPv4 if the connection fails. The --ipv4 and --ipv6 - options can specify which address to use when both are available. IPv6 - addresses can also be specified directly in URLs using the syntax: - - http://[2001:1890:1112:1::20]/overview.html - - When this style is used, the -g option must be given to stop curl from - interpreting the square brackets as special globbing characters. Link local - and site local addresses including a scope identifier, such as fe80::1234%1, - may also be used, but the scope portion must be numeric or match an existing - network interface on Linux and the percent character must be URL escaped. The - previous example in an SFTP URL might look like: - - sftp://[fe80::1234%251]/ - - IPv6 addresses provided other than in URLs (e.g. to the --proxy, --interface - or --ftp-port options) should not be URL encoded. - -METALINK - - Curl supports Metalink (both version 3 and 4 (RFC 5854) are supported), a way - to list multiple URIs and hashes for a file. Curl will make use of the mirrors - listed within for failover if there are errors (such as the file or server not - being available). It will also verify the hash of the file after the download - completes. The Metalink file itself is downloaded and processed in memory and - not stored in the local file system. 
- - Example to use a remote Metalink file: - - curl --metalink http://www.example.com/example.metalink - - To use a Metalink file in the local file system, use FILE protocol (file://): - - curl --metalink file://example.metalink - - Please note that if FILE protocol is disabled, there is no way to use a local - Metalink file at the time of this writing. Also note that if --metalink and - --include are used together, --include will be ignored. This is because including - headers in the response will break Metalink parser and if the headers are included - in the file described in Metalink file, hash check will fail. - -MAILING LISTS - - For your convenience, we have several open mailing lists to discuss curl, - its development and things relevant to this. Get all info at - https://curl.haxx.se/mail/. Some of the lists available are: - - curl-users - - Users of the command line tool. How to use it, what doesn't work, new - features, related tools, questions, news, installations, compilations, - running, porting etc. - - curl-library - - Developers using or developing libcurl. Bugs, extensions, improvements. - - curl-announce - - Low-traffic. Only receives announcements of new public versions. At worst, - that makes something like one or two mails per month, but usually only one - mail every second month. - - curl-and-php - - Using the curl functions in PHP. Everything curl with a PHP angle. Or PHP - with a curl angle. - - curl-and-python - - Python hackers using curl with or without the python binding pycurl. - - Please direct curl questions, feature requests and trouble reports to one of - these mailing lists instead of mailing any individual. diff --git a/docs/MANUAL.md b/docs/MANUAL.md new file mode 100644 index 00000000000000..5721436007c979 --- /dev/null +++ b/docs/MANUAL.md @@ -0,0 +1,1011 @@ +# curl tutorial + +## Simple Usage + +Get the main page from a web-server: + + curl https://www.example.com/ + +Get the README file the user's home directory at funet's ftp-server: + + curl ftp://ftp.funet.fi/README + +Get a web page from a server using port 8000: + + curl http://www.weirdserver.com:8000/ + +Get a directory listing of an FTP site: + + curl ftp://ftp.funet.fi + +Get the definition of curl from a dictionary: + + curl dict://dict.org/m:curl + +Fetch two documents at once: + + curl ftp://ftp.funet.fi/ http://www.weirdserver.com:8000/ + +Get a file off an FTPS server: + + curl ftps://files.are.secure.com/secrets.txt + +or use the more appropriate FTPS way to get the same file: + + curl --ftp-ssl ftp://files.are.secure.com/secrets.txt + +Get a file from an SSH server using SFTP: + + curl -u username sftp://example.com/etc/issue + +Get a file from an SSH server using SCP using a private key (not +password-protected) to authenticate: + + curl -u username: --key ~/.ssh/id_rsa scp://example.com/~/file.txt + +Get a file from an SSH server using SCP using a private key +(password-protected) to authenticate: + + curl -u username: --key ~/.ssh/id_rsa --pass private_key_password + scp://example.com/~/file.txt + +Get the main page from an IPv6 web server: + + curl "http://[2001:1890:1112:1::20]/" + +Get a file from an SMB server: + + curl -u "domain\username:passwd" smb://server.example.com/share/file.txt + +## Download to a File + +Get a web page and store in a local file with a specific name: + + curl -o thatpage.html http://www.example.com/ + +Get a web page and store in a local file, make the local file get the name of +the remote document (if no file name part is specified in the URL, this 
will +fail): + + curl -O http://www.example.com/index.html + +Fetch two files and store them with their remote names: + + curl -O www.haxx.se/index.html -O curl.haxx.se/download.html + +## Using Passwords + +### FTP + +To ftp files using name+passwd, include them in the URL like: + + curl ftp://name:passwd@machine.domain:port/full/path/to/file + +or specify them with the -u flag like + + curl -u name:passwd ftp://machine.domain:port/full/path/to/file + +### FTPS + +It is just like for FTP, but you may also want to specify and use SSL-specific +options for certificates etc. + +Note that using `FTPS://` as prefix is the "implicit" way as described in the +standards while the recommended "explicit" way is done by using FTP:// and the +`--ftp-ssl` option. + +### SFTP / SCP + +This is similar to FTP, but you can use the `--key` option to specify a +private key to use instead of a password. Note that the private key may itself +be protected by a password that is unrelated to the login password of the +remote system; this password is specified using the `--pass` option. +Typically, curl will automatically extract the public key from the private key +file, but in cases where curl does not have the proper library support, a +matching public key file must be specified using the `--pubkey` option. + +### HTTP + +Curl also supports user and password in HTTP URLs, thus you can pick a file +like: + + curl http://name:passwd@machine.domain/full/path/to/file + +or specify user and password separately like in + + curl -u name:passwd http://machine.domain/full/path/to/file + +HTTP offers many different methods of authentication and curl supports +several: Basic, Digest, NTLM and Negotiate (SPNEGO). Without telling which +method to use, curl defaults to Basic. You can also ask curl to pick the most +secure ones out of the ones that the server accepts for the given URL, by +using `--anyauth`. + +**Note**! According to the URL specification, HTTP URLs can not contain a user +and password, so that style will not work when using curl via a proxy, even +though curl allows it at other times. When using a proxy, you _must_ use the +`-u` style for user and password. + +### HTTPS + +Probably most commonly used with private certificates, as explained below. + +## Proxy + +curl supports both HTTP and SOCKS proxy servers, with optional authentication. +It does not have special support for FTP proxy servers since there are no +standards for those, but it can still be made to work with many of them. You +can also use both HTTP and SOCKS proxies to transfer files to and from FTP +servers. + +Get an ftp file using an HTTP proxy named my-proxy that uses port 888: + + curl -x my-proxy:888 ftp://ftp.leachsite.com/README + +Get a file from an HTTP server that requires user and password, using the +same proxy as above: + + curl -u user:passwd -x my-proxy:888 http://www.get.this/ + +Some proxies require special authentication. Specify by using -U as above: + + curl -U user:passwd -x my-proxy:888 http://www.get.this/ + +A comma-separated list of hosts and domains which do not use the proxy can be +specified as: + + curl --noproxy localhost,get.this -x my-proxy:888 http://www.get.this/ + +If the proxy is specified with `--proxy1.0` instead of `--proxy` or `-x`, then +curl will use HTTP/1.0 instead of HTTP/1.1 for any `CONNECT` attempts. + +curl also supports SOCKS4 and SOCKS5 proxies with `--socks4` and `--socks5`. + +See also the environment variables Curl supports that offer further proxy +control. 
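+
+For example, to go through a SOCKS5 proxy named on the command line (the
+proxy host name and port below are only placeholders for your own setup):
+
+    curl --socks5 socksproxy.example.com:1080 http://www.get.this/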
+ +Most FTP proxy servers are set up to appear as a normal FTP server from the +client's perspective, with special commands to select the remote FTP server. +curl supports the `-u`, `-Q` and `--ftp-account` options that can be used to +set up transfers through many FTP proxies. For example, a file can be uploaded +to a remote FTP server using a Blue Coat FTP proxy with the options: + + curl -u "username@ftp.server Proxy-Username:Remote-Pass" + --ftp-account Proxy-Password --upload-file local-file + ftp://my-ftp.proxy.server:21/remote/upload/path/ + +See the manual for your FTP proxy to determine the form it expects to set up +transfers, and curl's `-v` option to see exactly what curl is sending. + +## Ranges + +HTTP 1.1 introduced byte-ranges. Using this, a client can request to get only +one or more subparts of a specified document. Curl supports this with the `-r` +flag. + +Get the first 100 bytes of a document: + + curl -r 0-99 http://www.get.this/ + +Get the last 500 bytes of a document: + + curl -r -500 http://www.get.this/ + +Curl also supports simple ranges for FTP files as well. Then you can only +specify start and stop position. + +Get the first 100 bytes of a document using FTP: + + curl -r 0-99 ftp://www.get.this/README + +## Uploading + +### FTP / FTPS / SFTP / SCP + +Upload all data on stdin to a specified server: + + curl -T - ftp://ftp.upload.com/myfile + +Upload data from a specified file, login with user and password: + + curl -T uploadfile -u user:passwd ftp://ftp.upload.com/myfile + +Upload a local file to the remote site, and use the local file name at the +remote site too: + + curl -T uploadfile -u user:passwd ftp://ftp.upload.com/ + +Upload a local file to get appended to the remote file: + + curl -T localfile -a ftp://ftp.upload.com/remotefile + +Curl also supports ftp upload through a proxy, but only if the proxy is +configured to allow that kind of tunneling. If it does, you can run curl in a +fashion similar to: + + curl --proxytunnel -x proxy:port -T localfile ftp.upload.com + +### SMB / SMBS + + curl -T file.txt -u "domain\username:passwd" + smb://server.example.com/share/ + +### HTTP + +Upload all data on stdin to a specified HTTP site: + + curl -T - http://www.upload.com/myfile + +Note that the HTTP server must have been configured to accept PUT before this +can be done successfully. + +For other ways to do HTTP data upload, see the POST section below. + +## Verbose / Debug + +If curl fails where it isn't supposed to, if the servers don't let you in, if +you can't understand the responses: use the `-v` flag to get verbose +fetching. Curl will output lots of info and what it sends and receives in +order to let the user see all client-server interaction (but it won't show you +the actual data). + + curl -v ftp://ftp.upload.com/ + +To get even more details and information on what curl does, try using the +`--trace` or `--trace-ascii` options with a given file name to log to, like +this: + + curl --trace trace.txt www.haxx.se + + +## Detailed Information + +Different protocols provide different ways of getting detailed information +about specific files/documents. To get curl to show detailed information about +a single file, you should use `-I`/`--head` option. It displays all available +info on a single file for HTTP and FTP. The HTTP information is a lot more +extensive. + +For HTTP, you can get the header information (the same as `-I` would show) +shown before the data by using `-i`/`--include`. 
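+
+For example, to print the response headers followed by the document itself
+(the host name here is only an example):
+
+    curl -i https://www.example.com/
+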
+Curl understands the `-D`/`--dump-header` option when getting files from both
+FTP and HTTP, and it will then store the headers in the specified file.
+
+Store the HTTP headers in a separate file (headers.txt in the example):
+
+    curl --dump-header headers.txt curl.haxx.se
+
+Note that headers stored in a separate file can be very useful at a later time
+if you want curl to use cookies sent by the server. More about that in the
+cookies section.
+
+## POST (HTTP)
+
+It's easy to post data using curl. This is done using the `-d <data>` option.
+The post data must be urlencoded.
+
+Post a simple "name" and "phone" guestbook entry.
+
+    curl -d "name=Rafael%20Sagula&phone=3320780" http://www.where.com/guest.cgi
+
+How to post a form with curl, lesson #1:
+
+Dig out all the `<input>` tags in the form that you want to fill in.
+
+If there's a "normal" post, you use `-d` to post. `-d` takes a full "post
+string", which is in the format
+
+    <variable1>=<data1>&<variable2>=<data2>&...
+
+The 'variable' names are the names set with `"name="` in the `<input>` tags,
+and the data is the contents you want to fill in for the inputs. The data
+*must* be properly URL encoded. That means you replace space with + and that
+you replace weird letters with %XX where XX is the hexadecimal representation
+of the letter's ASCII code.
+
+Example:
+
+(page located at `http://www.formpost.com/getthis/`)
+
+    <form action="post.cgi" method="post">
+    <input name=user size=10>
+    <input name=pass type=password size=10>
+    <input name=id type=hidden value="blablabla">
+    <input name=ding value="submit">
+    </form>
+
+We want to enter user 'foobar' with password '12345'.
+
+To post to this, you enter a curl command line like:
+
+    curl -d "user=foobar&pass=12345&id=blablabla&ding=submit"
+      http://www.formpost.com/getthis/post.cgi
+
+While `-d` uses the application/x-www-form-urlencoded mime-type, generally
+understood by CGIs and similar, curl also supports the more capable
+multipart/form-data type. This latter type supports things like file upload.
+
+`-F` accepts parameters like `-F "name=contents"`. If you want the contents to
+be read from a file, use `@filename` as contents. When specifying a file, you
+can also specify the file content type by appending `;type=<mime type>` to the
+file name. You can also post the contents of several files in one field. For
+example, the field name 'coolfiles' is used to send three files, with
+different content types using the following syntax:
+
+    curl -F "coolfiles=@fil1.gif;type=image/gif,fil2.txt,fil3.html"
+      http://www.post.com/postit.cgi
+
+If the content-type is not specified, curl will try to guess from the file
+extension (it only knows a few), or use the previously specified type (from an
+earlier file if several files are specified in a list) or else it will use the
+default type 'application/octet-stream'.
+
+Emulate a fill-in form with `-F`. Let's say you fill in three fields in a
+form. One field is a file name to post, one field is your name and one field
+is a file description. We want to post the file we have written named
+"cooltext.txt". To let curl do the posting of this data instead of your
+favourite browser, you have to read the HTML source of the form page and find
+the names of the input fields. In our example, the input field names are
+'file', 'yourname' and 'filedescription'.
+
+    curl -F "file=@cooltext.txt" -F "yourname=Daniel"
+      -F "filedescription=Cool text file with cool text inside"
+      http://www.post.com/postit.cgi
+
+To send two files in one post you can do it in two ways:
+
+Send multiple files in a single "field" with a single field name:
+
+    curl -F "pictures=@dog.gif,cat.gif" $URL
+
+Send two fields with two field names:
+
+    curl -F "docpicture=@dog.gif" -F "catpicture=@cat.gif" $URL
+
+To send a field value literally without interpreting a leading `@` or `<`, or
+an embedded `;type=`, use `--form-string` instead of `-F`. This is recommended
+when the value is obtained from a user or some other unpredictable
+source. Under these circumstances, using `-F` instead of `--form-string` could
+allow a user to trick curl into uploading a file.
+
+## Referrer
+
+An HTTP request has the option to include information about which address
+referred it to the actual page. Curl allows you to specify the referrer to be
+used on the command line. It is especially useful to fool or trick stupid
+servers or CGI scripts that rely on that information being available or
+containing certain data.
+
+    curl -e www.coolsite.com http://www.showme.com/
+
+## User Agent
+
+An HTTP request has the option to include information about the browser that
+generated the request. Curl allows it to be specified on the command line. It
+is especially useful to fool or trick stupid servers or CGI scripts that only
+accept certain browsers.
+
+Example:
+
+    curl -A 'Mozilla/3.0 (Win95; I)' http://www.nationsbank.com/
+
+Other common strings:
+
+- `Mozilla/3.0 (Win95; I)` - Netscape Version 3 for Windows 95
+- `Mozilla/3.04 (Win95; U)` - Netscape Version 3 for Windows 95
+- `Mozilla/2.02 (OS/2; U)` - Netscape Version 2 for OS/2
+- `Mozilla/4.04 [en] (X11; U; AIX 4.2; Nav)` - Netscape for AIX
+- `Mozilla/4.05 [en] (X11; U; Linux 2.0.32 i586)` - Netscape for Linux
+
+Note that Internet Explorer tries hard to be compatible in every way:
+
+- `Mozilla/4.0 (compatible; MSIE 4.01; Windows 95)` - MSIE for W95
+
+Mozilla is not the only possible User-Agent name:
+
+- `Konqueror/1.0` - KDE File Manager desktop client
+- `Lynx/2.7.1 libwww-FM/2.14` - Lynx command line browser
+
+## Cookies
+
+Cookies are generally used by web servers to keep state information at the
+client's side. The server sets cookies by sending a response line in the
+headers that looks like `Set-Cookie: <data>` where the data part then
+typically contains a set of `NAME=VALUE` pairs (separated by semicolons `;`
+like `NAME1=VALUE1; NAME2=VALUE2;`). The server can also specify for what path
+the "cookie" should be used (by specifying `path=value`), when the cookie
+should expire (`expire=DATE`), for what domain to use it (`domain=NAME`) and
+if it should be used on secure connections only (`secure`).
+
+If you've received a page from a server that contains a header like:
+
+    Set-Cookie: sessionid=boo123; path="/foo";
+
+it means the server wants that first pair passed on when we get anything in a
+path beginning with "/foo".
+
+Example, get a page that wants my name passed in a cookie:
+
+    curl -b "name=Daniel" www.sillypage.com
+
+Curl also has the ability to use previously received cookies in following
+sessions. If you get cookies from a server and store them in a file in a
+manner similar to:
+
+    curl --dump-header headers www.example.com
+
+... you can then, in a second connection to that (or another) site, use the
+cookies from the 'headers' file like:
+
+    curl -b headers www.example.com
+
+While saving headers to a file is a working way to store cookies, it is
+however error-prone and not the preferred way to do this. Instead, make curl
+save the incoming cookies using the well-known netscape cookie format like
+this:
+
+    curl -c cookies.txt www.example.com
+
+Note that by specifying `-b` you enable the "cookie awareness" and with `-L`
+you can make curl follow a `Location:` header (which often is used in
+combination with cookies). So that if a site sends cookies and a location, you
+can use a non-existing file to trigger the cookie awareness like:
+
+    curl -L -b empty.txt www.example.com
+
+The file to read cookies from must be formatted using plain HTTP headers OR as
+netscape's cookie file. Curl will determine what kind it is based on the file
+contents. In the above command, curl will parse the header and store the
+cookies received from www.example.com. curl will send to the server the
+stored cookies which match the request as it follows the location. The file
+"empty.txt" may be a nonexistent file.
+
+To read and write cookies from a netscape cookie file, you can set both `-b`
+and `-c` to use the same file:
+
+    curl -b cookies.txt -c cookies.txt www.example.com
+
+## Progress Meter
+
+The progress meter exists to show a user that something actually is
+happening. The different fields in the output have the following meaning:
+
+    % Total    % Received % Xferd  Average Speed          Time             Curr.
+ Dload Upload Total Current Left Speed + 0 151M 0 38608 0 0 9406 0 4:41:43 0:00:04 4:41:39 9287 + +From left-to-right: + + - % - percentage completed of the whole transfer + - Total - total size of the whole expected transfer + - % - percentage completed of the download + - Received - currently downloaded amount of bytes + - % - percentage completed of the upload + - Xferd - currently uploaded amount of bytes + - Average Speed Dload - the average transfer speed of the download + - Average Speed Upload - the average transfer speed of the upload + - Time Total - expected time to complete the operation + - Time Current - time passed since the invoke + - Time Left - expected time left to completion + - Curr.Speed - the average transfer speed the last 5 seconds (the first + 5 seconds of a transfer is based on less time of course.) + +The `-#` option will display a totally different progress bar that doesn't +need much explanation! + +## Speed Limit + +Curl allows the user to set the transfer speed conditions that must be met to +let the transfer keep going. By using the switch `-y` and `-Y` you can make +curl abort transfers if the transfer speed is below the specified lowest limit +for a specified time. + +To have curl abort the download if the speed is slower than 3000 bytes per +second for 1 minute, run: + + curl -Y 3000 -y 60 www.far-away-site.com + +This can very well be used in combination with the overall time limit, so +that the above operation must be completed in whole within 30 minutes: + + curl -m 1800 -Y 3000 -y 60 www.far-away-site.com + +Forcing curl not to transfer data faster than a given rate is also possible, +which might be useful if you're using a limited bandwidth connection and you +don't want your transfer to use all of it (sometimes referred to as +"bandwidth throttle"). + +Make curl transfer data no faster than 10 kilobytes per second: + + curl --limit-rate 10K www.far-away-site.com + +or + + curl --limit-rate 10240 www.far-away-site.com + +Or prevent curl from uploading data faster than 1 megabyte per second: + + curl -T upload --limit-rate 1M ftp://uploadshereplease.com + +When using the `--limit-rate` option, the transfer rate is regulated on a +per-second basis, which will cause the total transfer speed to become lower +than the given number. Sometimes of course substantially lower, if your +transfer stalls during periods. + +## Config File + +Curl automatically tries to read the `.curlrc` file (or `_curlrc` file on +Microsoft Windows systems) from the user's home dir on startup. + +The config file could be made up with normal command line switches, but you +can also specify the long options without the dashes to make it more +readable. You can separate the options and the parameter with spaces, or with +`=` or `:`. Comments can be used within the file. If the first letter on a +line is a `#`-symbol the rest of the line is treated as a comment. + +If you want the parameter to contain spaces, you must enclose the entire +parameter within double quotes (`"`). Within those quotes, you specify a quote +as `\"`. + +NOTE: You must specify options and their arguments on the same line. + +Example, set default time out and proxy in a config file: + + # We want a 30 minute timeout: + -m 1800 + # ... and we use a proxy for all accesses: + proxy = proxy.our.domain.com:8080 + +Whitespaces ARE significant at the end of lines, but all whitespace leading +up to the first characters of each line are ignored. 
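+
+For example, a parameter that contains spaces must be quoted; the user agent
+string below is only a placeholder:
+
+    # spoof a browser-like user agent:
+    user-agent = "Mozilla/5.0 (Example Browser)"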
+ +Prevent curl from reading the default file by using -q as the first command +line parameter, like: + + curl -q www.thatsite.com + +Force curl to get and display a local help page in case it is invoked without +URL by making a config file similar to: + + # default url to get + url = "http://help.with.curl.com/curlhelp.html" + +You can specify another config file to be read by using the `-K`/`--config` +flag. If you set config file name to `-` it'll read the config from stdin, +which can be handy if you want to hide options from being visible in process +tables etc: + + echo "user = user:passwd" | curl -K - http://that.secret.site.com + +## Extra Headers + +When using curl in your own very special programs, you may end up needing +to pass on your own custom headers when getting a web page. You can do +this by using the `-H` flag. + +Example, send the header `X-you-and-me: yes` to the server when getting a +page: + + curl -H "X-you-and-me: yes" www.love.com + +This can also be useful in case you want curl to send a different text in a +header than it normally does. The `-H` header you specify then replaces the +header curl would normally send. If you replace an internal header with an +empty one, you prevent that header from being sent. To prevent the `Host:` +header from being used: + + curl -H "Host:" www.server.com + +## FTP and Path Names + +Do note that when getting files with a `ftp://` URL, the given path is +relative the directory you enter. To get the file `README` from your home +directory at your ftp site, do: + + curl ftp://user:passwd@my.site.com/README + +But if you want the README file from the root directory of that very same +site, you need to specify the absolute file name: + + curl ftp://user:passwd@my.site.com//README + +(I.e with an extra slash in front of the file name.) + +## SFTP and SCP and Path Names + +With sftp: and scp: URLs, the path name given is the absolute name on the +server. To access a file relative to the remote user's home directory, prefix +the file with `/~/` , such as: + + curl -u $USER sftp://home.example.com/~/.bashrc + +## FTP and Firewalls + +The FTP protocol requires one of the involved parties to open a second +connection as soon as data is about to get transferred. There are two ways to +do this. + +The default way for curl is to issue the PASV command which causes the server +to open another port and await another connection performed by the +client. This is good if the client is behind a firewall that doesn't allow +incoming connections. + + curl ftp.download.com + +If the server, for example, is behind a firewall that doesn't allow +connections on ports other than 21 (or if it just doesn't support the `PASV` +command), the other way to do it is to use the `PORT` command and instruct the +server to connect to the client on the given IP number and port (as parameters +to the PORT command). + +The `-P` flag to curl supports a few different options. Your machine may have +several IP-addresses and/or network interfaces and curl allows you to select +which of them to use. 
Default address can also be used: + + curl -P - ftp.download.com + +Download with `PORT` but use the IP address of our `le0` interface (this does +not work on windows): + + curl -P le0 ftp.download.com + +Download with `PORT` but use 192.168.0.10 as our IP address to use: + + curl -P 192.168.0.10 ftp.download.com + +## Network Interface + +Get a web page from a server using a specified port for the interface: + + curl --interface eth0:1 http://www.example.com/ + +or + + curl --interface 192.168.1.10 http://www.example.com/ + +## HTTPS + +Secure HTTP requires a TLS library to be installed and used when curl is +built. If that is done, curl is capable of retrieving and posting documents +using the HTTPS protocol. + +Example: + + curl https://www.secure-site.com + +curl is also capable of using client certificates to get/post files from sites +that require valid certificates. The only drawback is that the certificate +needs to be in PEM-format. PEM is a standard and open format to store +certificates with, but it is not used by the most commonly used browsers. If +you want curl to use the certificates you use with your (favourite) browser, +you may need to download/compile a converter that can convert your browser's +formatted certificates to PEM formatted ones. + +Example on how to automatically retrieve a document using a certificate with a +personal password: + + curl -E /path/to/cert.pem:password https://secure.site.com/ + +If you neglect to specify the password on the command line, you will be +prompted for the correct password before any data can be received. + +Many older HTTPS servers have problems with specific SSL or TLS versions, +which newer versions of OpenSSL etc use, therefore it is sometimes useful to +specify what SSL-version curl should use. Use -3, -2 or -1 to specify that +exact SSL version to use (for SSLv3, SSLv2 or TLSv1 respectively): + + curl -2 https://secure.site.com/ + +Otherwise, curl will attempt to use a sensible TLS default version. + +## Resuming File Transfers + +To continue a file transfer where it was previously aborted, curl supports +resume on HTTP(S) downloads as well as FTP uploads and downloads. + +Continue downloading a document: + + curl -C - -o file ftp://ftp.server.com/path/file + +Continue uploading a document: + + curl -C - -T file ftp://ftp.server.com/path/file + +Continue downloading a document from a web server + + curl -C - -o file http://www.server.com/ + +## Time Conditions + +HTTP allows a client to specify a time condition for the document it requests. +It is `If-Modified-Since` or `If-Unmodified-Since`. curl allows you to specify +them with the `-z`/`--time-cond` flag. + +For example, you can easily make a download that only gets performed if the +remote file is newer than a local copy. It would be made like: + + curl -z local.html http://remote.server.com/remote.html + +Or you can download a file only if the local file is newer than the remote +one. Do this by prepending the date string with a `-`, as in: + + curl -z -local.html http://remote.server.com/remote.html + +You can specify a "free text" date as condition. Tell curl to only download +the file if it was updated since January 12, 2012: + + curl -z "Jan 12 2012" http://remote.server.com/remote.html + +Curl will then accept a wide range of date formats. You always make the date +check the other way around by prepending it with a dash (`-`). 
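+
+For example, to download the file only if it was *not* updated since January
+12, 2012 (reusing the example URL from above):
+
+    curl -z "-Jan 12 2012" http://remote.server.com/remote.html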
+

## DICT

For fun try

    curl dict://dict.org/m:curl
    curl dict://dict.org/d:heisenbug:jargon
    curl dict://dict.org/d:daniel:gcide

Aliases for 'm' are 'match' and 'find', and aliases for 'd' are 'define' and
'lookup'. For example,

    curl dict://dict.org/find:curl

Commands that break the URL description of the RFC (but not the DICT
protocol) are:

    curl dict://dict.org/show:db
    curl dict://dict.org/show:strat

Authentication support is still missing.

## LDAP

If you have installed the OpenLDAP library, curl can take advantage of it and
offer `ldap://` support. On Windows, curl will use WinLDAP from Platform SDK
by default.

The default protocol version used by curl is LDAPv3. LDAPv2 is used as a
fallback mechanism if LDAPv3 fails to connect.

LDAP is a complex thing and writing an LDAP query is not an easy task. I do
advise you to dig up the syntax description for that elsewhere. One such place
might be: [RFC 2255, The LDAP URL
Format](https://curl.haxx.se/rfc/rfc2255.txt)

To show you an example, this is how I can get all people from my local LDAP
server that have a certain sub-domain in their email address:

    curl -B "ldap://ldap.frontec.se/o=frontec??sub?mail=*sth.frontec.se"

If I want the same info in HTML format, I can get it by not using the `-B`
(enforce ASCII) flag.

You can also use authentication when accessing an LDAP catalog:

    curl -u user:passwd "ldap://ldap.frontec.se/o=frontec??sub?mail=*"
    curl "ldap://user:passwd@ldap.frontec.se/o=frontec??sub?mail=*"

By default, if a user and password are provided, OpenLDAP/WinLDAP will use
basic authentication. On Windows you can control this behavior by providing
one of the `--basic`, `--ntlm` or `--digest` options on the curl command line:

    curl --ntlm "ldap://user:passwd@ldap.frontec.se/o=frontec??sub?mail=*"

On Windows, if no user/password is specified, an auto-negotiation mechanism is
used with the current logon credentials (SSPI/SPNEGO).

## Environment Variables

Curl reads and understands the following environment variables:

    http_proxy, HTTPS_PROXY, FTP_PROXY

They should be set for protocol-specific proxies. A general proxy should be
set with

    ALL_PROXY

A comma-separated list of host names that shouldn't go through any proxy is
set in (only an asterisk, `*`, matches all hosts)

    NO_PROXY

If the host name matches one of these strings, or the host is within the
domain of one of these strings, transactions with that node will not be
proxied. When a domain is used, it needs to start with a period. A user can
specify that both www.example.com and foo.example.com should not use a proxy
by setting `NO_PROXY` to `.example.com`. By including the full name you can
exclude specific host names, so to make `www.example.com` not use a proxy but
still have `foo.example.com` do it, set `NO_PROXY` to `www.example.com`.

The usage of the `-x`/`--proxy` flag overrides the environment variables.

## Netrc

Unix introduced the `.netrc` concept a long time ago. It is a way for a user
to specify a name and password for commonly visited FTP sites in a file so
that you don't have to type them in each time you visit those sites. You
realize this is a big security risk if someone else gets hold of your
passwords, so most unix programs won't read this file unless it is only
readable by yourself (curl doesn't care though).

Curl supports `.netrc` files if told to (using the `-n`/`--netrc` and
`--netrc-optional` options).
This is not restricted to just FTP, so curl can
use it for all protocols where authentication is used.

A very simple `.netrc` file could look something like:

    machine curl.haxx.se login iamdaniel password mysecret

## Custom Output

To better allow script programmers to get to know about the progress of curl,
the `-w`/`--write-out` option was introduced. Using this, you can specify what
information from the previous transfer you want to extract.

To display the number of bytes downloaded together with some text and an
ending newline:

    curl -w 'We downloaded %{size_download} bytes\n' www.download.com

## Kerberos FTP Transfer

Curl supports kerberos4 and kerberos5/GSSAPI for FTP transfers. You need the
kerberos package installed and used at curl build time for it to be available.

First, get the krb-ticket the normal way, like with the kinit/kauth tool.
Then use curl in a way similar to:

    curl --krb private ftp://krb4site.com -u username:fakepwd

There's no use for a password on the `-u` switch, but a blank one will make
curl ask for one and you already entered the real password to kinit/kauth.

## TELNET

The curl telnet support is basic and very easy to use. Curl passes all data
passed to it on stdin to the remote server. Connect to a remote telnet server
using a command line similar to:

    curl telnet://remote.server.com

And enter the data to pass to the server on stdin. The result will be sent to
stdout or to the file you specify with `-o`.

You might want the `-N`/`--no-buffer` option to switch off the buffered output
for slow connections or similar.

Pass options to the telnet protocol negotiation by using the `-t` option. To
tell the server we use a vt100 terminal, try something like:

    curl -tTTYPE=vt100 telnet://remote.server.com

Other interesting options for `-t` include:

 - `XDISPLOC=<X display>` Sets the X display location.
 - `NEW_ENV=<var,val>` Sets an environment variable.

NOTE: The telnet protocol does not specify any way to log in with a specified
user and password, so curl can't do that automatically. To do that, you need
to track when the login prompt is received and send the username and password
accordingly.

## Persistent Connections

Specifying multiple files on a single command line will make curl transfer all
of them, one after the other in the specified order.

libcurl will attempt to use persistent connections for the transfers so that
the second transfer to the same host can use the same connection that was
already initiated and was left open in the previous transfer. This greatly
decreases connection time for all but the first transfer and it makes far
better use of the network.

Note that curl cannot use persistent connections for transfers that are done
in subsequent curl invocations. Try to stuff as many URLs as possible on the
same command line if they are using the same host, as that'll make the
transfers faster. If you use an HTTP proxy for file transfers, practically all
transfers will be persistent.

## Multiple Transfers With A Single Command Line

As is mentioned above, you can download multiple files with one command line
by simply adding more URLs. If you want those to get saved to a local file
instead of just printed to stdout, you need to add one save option for each
URL you specify. Note that this also goes for the `-O` option (but not
`--remote-name-all`).
+

For example: get two files and use `-O` for the first and a custom file
name for the second:

    curl -O http://url.com/file.txt ftp://ftp.com/moo.exe -o moo.jpg

You can also upload multiple files in a similar fashion:

    curl -T local1 ftp://ftp.com/moo.exe -T local2 ftp://ftp.com/moo2.txt

## IPv6

curl will connect to a server with IPv6 when a host lookup returns an IPv6
address and fall back to IPv4 if the connection fails. The `--ipv4` and
`--ipv6` options can specify which address to use when both are
available. IPv6 addresses can also be specified directly in URLs using the
syntax:

    http://[2001:1890:1112:1::20]/overview.html

When this style is used, the `-g` option must be given to stop curl from
interpreting the square brackets as special globbing characters. Link local
and site local addresses including a scope identifier, such as `fe80::1234%1`,
may also be used, but the scope portion must be numeric or match an existing
network interface on Linux and the percent character must be URL escaped. The
previous example in an SFTP URL might look like:

    sftp://[fe80::1234%251]/

IPv6 addresses provided other than in URLs (e.g. to the `--proxy`,
`--interface` or `--ftp-port` options) should not be URL encoded.

## Metalink

Curl supports Metalink (both version 3 and 4 (RFC 5854) are supported), a way
to list multiple URIs and hashes for a file. Curl will make use of the mirrors
listed within for failover if there are errors (such as the file or server not
being available). It will also verify the hash of the file after the download
completes. The Metalink file itself is downloaded and processed in memory and
not stored in the local file system.

Example to use a remote Metalink file:

    curl --metalink http://www.example.com/example.metalink

To use a Metalink file in the local file system, use the FILE protocol
(`file://`):

    curl --metalink file://example.metalink

Please note that if the FILE protocol is disabled, there is no way to use a
local Metalink file at the time of this writing. Also note that if
`--metalink` and `--include` are used together, `--include` will be ignored.
This is because including headers in the response will break the Metalink
parser, and if the headers are included in the file described in the Metalink
file, the hash check will fail.

## Mailing Lists

For your convenience, we have several open mailing lists to discuss curl, its
development and things relevant to this. Get all info at
https://curl.haxx.se/mail/.

Please direct curl questions, feature requests and trouble reports to one of
these mailing lists instead of mailing any individual.

Available lists include:

### curl-users

Users of the command line tool. How to use it, what doesn't work, new
features, related tools, questions, news, installations, compilations,
running, porting etc.

### curl-library

Developers using or developing libcurl. Bugs, extensions, improvements.

### curl-announce

Low-traffic. Only receives announcements of new public versions. At worst,
that makes something like one or two mails per month, but usually only one
mail every second month.

### curl-and-php

Using the curl functions in PHP. Everything curl with a PHP angle. Or PHP with
a curl angle.

### curl-and-python

Python hackers using curl with or without the python binding pycurl.
+ diff --git a/docs/MQTT.md b/docs/MQTT.md new file mode 100644 index 00000000000000..741b0729709926 --- /dev/null +++ b/docs/MQTT.md @@ -0,0 +1,29 @@ +# MQTT in curl + +## Usage + +A plain "GET" subscribes to the topic and prints all published messages. +Doing a "POST" publishes the post data to the topic and exits. + +Example subscribe: + + curl mqtt://host/home/bedroom/temp + +Example publish: + + curl -d 75 mqtt://host/home/bedroom/dimmer + +## What does curl deliver as a response to a subscribe + +It outputs two bytes topic length (MSB | LSB), the topic followed by the +payload. + +## Caveats + +Remaining limitations: + - No username support + - Only QoS level 0 is implemented for publish + - No way to set retain flag for publish + - No username/password support + - No TLS (mqtts) support + - Naive EAGAIN handling won't handle split messages diff --git a/docs/Makefile.am b/docs/Makefile.am index 8eeabd478ae9fa..b7d17922848d18 100644 --- a/docs/Makefile.am +++ b/docs/Makefile.am @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2019, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms @@ -44,14 +44,20 @@ EXTRA_DIST = \ $(noinst_man_MANS) \ ALTSVC.md \ BINDINGS.md \ - BUGS \ + BUG-BOUNTY.md \ + BUGS.md \ CHECKSRC.md \ CIPHERS.md \ CMakeLists.txt \ CODE_OF_CONDUCT.md \ + CODE_REVIEW.md \ CODE_STYLE.md \ CONTRIBUTE.md \ + CURL-DISABLE.md \ DEPRECATE.md \ + DYNBUF.md \ + ECH.md \ + EXPERIMENTAL.md \ FAQ \ FEATURES \ GOVERNANCE.md \ @@ -59,27 +65,26 @@ EXTRA_DIST = \ HISTORY.md \ HTTP-COOKIES.md \ HTTP2.md \ + HTTP3.md \ INSTALL \ INSTALL.cmake \ INSTALL.md \ INTERNALS.md \ KNOWN_BUGS \ - LICENSE-MIXING.md \ MAIL-ETIQUETTE \ - README.cmake \ + MQTT.md \ + options-in-versions \ + PARALLEL-TRANSFERS.md \ README.md \ - README.netware \ - README.win32 \ RELEASE-PROCEDURE.md \ - RESOURCES \ ROADMAP.md \ SECURITY-PROCESS.md \ SSL-PROBLEMS.md \ SSLCERTS.md \ THANKS \ TODO \ - TheArtOfHttpScripting \ - VERSIONS + TheArtOfHttpScripting.md \ + VERSIONS.md MAN2HTML= roffit $< >$@ diff --git a/docs/PARALLEL-TRANSFERS.md b/docs/PARALLEL-TRANSFERS.md new file mode 100644 index 00000000000000..da688ea050ef1e --- /dev/null +++ b/docs/PARALLEL-TRANSFERS.md @@ -0,0 +1,58 @@ +# Parallel transfers + +curl 7.66.0 introduces support for doing multiple transfers simultaneously; in +parallel. + +## -Z, --parallel + +When this command line option is used, curl will perform the transfers given +to it at the same time. It will do up to `--parallel-max` concurrent +transfers, with a default value of 50. + +## Progress meter + +The progress meter that is displayed when doing parallel transfers is +completely different than the regular one used for each single transfer. 
+ + It shows: + + o percent download (if known, which means *all* transfers need to have a + known size) + o percent upload (if known, with the same caveat as for download) + o total amount of downloaded data + o total amount of uploaded data + o number of transfers to perform + o number of concurrent transfers being transferred right now + o number of transfers queued up waiting to start + o total time all transfers are expected to take (if sizes are known) + o current time the transfers have spent so far + o estimated time left (if sizes are known) + o current transfer speed (the faster of UL/DL speeds measured over the last + few seconds) + +Example: + + DL% UL% Dled Uled Xfers Live Qd Total Current Left Speed + 72 -- 37.9G 0 101 30 23 0:00:55 0:00:34 0:00:22 2752M + +## Behavior differences + +Connections are shared fine between different easy handles, but the +"authentication contexts" are not. So for example doing HTTP Digest auth with +one handle for a particular transfer and then continue on with another handle +that reuses the same connection, the second handle can't send the necessary +Authorization header at once since the context is only kept in the original +easy handle. + +To fix this, the authorization state could be made possible to share with the +share API as well, as a context per origin + path (realm?) basically. + +Visible in test 153, 1412 and more. + +## Feedback! + +This is early days for parallel transfer support. Keep your eyes open for +unintended side effects or downright bugs. + +Tell us what you think and how you think we could improve this feature! + diff --git a/docs/README.cmake b/docs/README.cmake deleted file mode 100644 index 084c1de6d56ac4..00000000000000 --- a/docs/README.cmake +++ /dev/null @@ -1,16 +0,0 @@ - _ _ ____ _ - ___| | | | _ \| | - / __| | | | |_) | | - | (__| |_| | _ <| |___ - \___|\___/|_| \_\_____| - -README.cmake - Read the README file first. - - Curl contains CMake build files that provide a way to build Curl with the - CMake build tool (www.cmake.org). CMake is a cross platform meta build tool - that generates native makefiles and IDE project files. The CMake build - system can be used to build Curl on any of its supported platforms. - - Read the INSTALL.cmake file for instructions on how to compile curl with - CMake. diff --git a/docs/README.md b/docs/README.md index 6ee42aad33184a..0521937d1552e7 100644 --- a/docs/README.md +++ b/docs/README.md @@ -7,6 +7,6 @@ subdirectories, using several different formats. Some of them are not ideal for reading directly in your browser. If you'd rather see the rendered version of the documentation, check out the -curl web site's [documentation section](https://curl.haxx.se/docs/) for +curl website's [documentation section](https://curl.haxx.se/docs/) for general curl stuff or the [libcurl section](https://curl.haxx.se/libcurl/) for libcurl related documentation. diff --git a/docs/README.netware b/docs/README.netware deleted file mode 100644 index e6e1e0002e224b..00000000000000 --- a/docs/README.netware +++ /dev/null @@ -1,24 +0,0 @@ - _ _ ____ _ - ___| | | | _ \| | - / __| | | | |_) | | - | (__| |_| | _ <| |___ - \___|\___/|_| \_\_____| - -README.netware - - Read the README file first. - - Curl has been successfully compiled with gcc / nlmconv on different flavours - of Linux as well as with the official Metrowerks CodeWarrior compiler. - While not being the main development target, a continuously growing share of - curl users are NetWare-based, especially also consuming the lib from PHP. 
- - The unix-style man pages are tricky to read on windows, so therefore all - those pages are also provided as web pages on the curl web site. - - The main curl.1 man page is also "built-in" in the command line tool. Use a - command line similar to this in order to extract a separate text file: - - curl -M >manual.txt - - Read the INSTALL file for instructions on how to compile curl self. diff --git a/docs/README.win32 b/docs/README.win32 deleted file mode 100644 index ca34dd162507c6..00000000000000 --- a/docs/README.win32 +++ /dev/null @@ -1,23 +0,0 @@ - _ _ ____ _ - ___| | | | _ \| | - / __| | | | |_) | | - | (__| |_| | _ <| |___ - \___|\___/|_| \_\_____| - -README.win32 - - Read the README file first. - - Curl has been compiled, built and run on all sorts of Windows and win32 - systems. While not being the main develop target, a fair share of curl users - are win32-based. - - The unix-style man pages are tricky to read on windows, so therefore all - those pages are also provided as web pages on the curl web site. - - The main curl.1 man page is also "built-in" in the command line tool. Use a - command line similar to this in order to extract a separate text file: - - curl -M >manual.txt - - Read the INSTALL file for instructions on how to compile curl self. diff --git a/docs/RELEASE-PROCEDURE.md b/docs/RELEASE-PROCEDURE.md index c7a88335728790..b53e5a39723edc 100644 --- a/docs/RELEASE-PROCEDURE.md +++ b/docs/RELEASE-PROCEDURE.md @@ -4,6 +4,8 @@ curl release procedure - how to do a release in the source code repo ----------------------- +- run `./scripts/copyright.pl` and correct possible omissions + - edit `RELEASE-NOTES` to be accurate - update `docs/THANKS` @@ -16,7 +18,7 @@ in the source code repo - run "./maketgz 7.34.0" to build the release tarballs. It is important that you run this on a machine with the correct set of autotools etc installed - as this is what then will be shipped and used by most users on *nix like + as this is what then will be shipped and used by most users on \*nix like systems. - push the git commits and the new tag @@ -40,7 +42,7 @@ in the curl-www repo - make sure all relevant changes are committed and pushed on the master branch - (the web site then updates its contents automatically) + (the website then updates its contents automatically) on github --------- @@ -61,12 +63,12 @@ celebrate curl release scheduling ======================= -Basics ------- +Release Cycle +------------- We do releases every 8 weeks on Wednesdays. If critical problems arise, we can insert releases outside of the schedule or we can move the release date - but -this is very rare. +this is rare. Each 8 week release cycle is split in two 4-week periods. @@ -78,14 +80,31 @@ Each 8 week release cycle is split in two 4-week periods. then only focus on fixing bugs and polishing things to make a solid coming release. +- After a regular procedure-following release (made on Wednesdays), the + feature window remains closed until the following Monday in case of special + actions or patch releases etc. + +If a future release date happens to end up on a "bad date", like in the middle +of common public holidays or when the lead release manager is away traveling, +the release date can be moved forwards or backwards a full week. This is then +advertised well in advance. 
+ Coming dates ------------ Based on the description above, here are some planned release dates (at the time of this writing): -- March 27, 2019 -- May 22, 2019 -- July 17, 2019 -- September 11, 2019 -- November 6, 2019 +- August 19, 2020 (7.72.0) +- October 14, 2020 +- December 9, 2020 +- February 3, 2021 +- March 31, 2021 +- May 26, 2021 +- July 21, 2021 +- September 15, 2021 +- November 10, 2021 + +The above (and more) curl-related dates are published in +[iCalendar format](https://calendar.google.com/calendar/ical/c9u5d64odop9js55oltfarjk6g%40group.calendar.google.com/public/basic.ics) +as well. diff --git a/docs/RESOURCES b/docs/RESOURCES deleted file mode 100644 index 55f75df770e7f9..00000000000000 --- a/docs/RESOURCES +++ /dev/null @@ -1,85 +0,0 @@ - _ _ ____ _ - Project ___| | | | _ \| | - / __| | | | |_) | | - | (__| |_| | _ <| |___ - \___|\___/|_| \_\_____| - - -This document lists documents and standards used by curl. - - RFC 959 - FTP Protocol - - RFC 1635 - How to Use Anonymous FTP - - RFC 1738 - Uniform Resource Locators - - RFC 1777 - Lightweight Directory Access Protocol (LDAP) - - RFC 1808 - Relative Uniform Resource Locators - - RFC 1867 - Form-based File Upload in HTML - - RFC 1950 - ZLIB Compressed Data Format Specification - - RFC 1951 - DEFLATE Compressed Data Format Specification - - RFC 1952 - GZIP File Format Specification - - RFC 1959 - LDAP URL Syntax - - RFC 2045-2049 - Everything you need to know about MIME! (needed for form - based upload) - - RFC 2068 - HTTP 1.1 (obsoleted by RFC 2616) - - RFC 2104 - Keyed-Hashing for Message Authentication - - RFC 2109 - HTTP State Management Mechanism (cookie stuff) - - Also, read Netscape's specification at - https://curl.haxx.se/rfc/cookie_spec.html - - RFC 2183 - The Content-Disposition Header Field - - RFC 2195 - CRAM-MD5 Authentication - - RFC 2229 - A Dictionary Server Protocol - - RFC 2255 - Newer LDAP URL Format - - RFC 2231 - MIME Parameter Value and Encoded Word Extensions: - Character Sets, Languages, and Continuations - - RFC 2388 - "Returning Values from Forms: multipart/form-data" - Use this as an addition to the RFC1867 - - RFC 2396 - "Uniform Resource Identifiers: Generic Syntax and Semantics" This - one obsoletes RFC 1738, but since RFC 1738 is often mentioned - I've left it in this list. - - RFC 2428 - FTP Extensions for IPv6 and NATs - - RFC 2577 - FTP Security Considerations - - RFC 2616 - HTTP 1.1, the latest - - RFC 2617 - HTTP Authentication - - RFC 2718 - Guidelines for new URL Schemes - - RFC 2732 - Format for Literal IPv6 Addresses in URL's - - RFC 2818 - HTTP Over TLS (TLS is the successor to SSL) - - RFC 2821 - Simple Mail Transfer Protocol (SMTP) - - RFC 2964 - Use of HTTP State Management - - RFC 2965 - HTTP State Management Mechanism. Cookies. Obsoletes RFC2109 - - RFC 3207 - SMTP Over TLS - - RFC 4616 - PLAIN Authentication - - RFC 4954 - SMTP Authentication - - RFC 7932 - Brotli Compressed Data Format diff --git a/docs/ROADMAP.md b/docs/ROADMAP.md index 10e7effee8ee67..574bd8103d59a6 100644 --- a/docs/ROADMAP.md +++ b/docs/ROADMAP.md @@ -5,55 +5,15 @@ Roadmap of things Daniel Stenberg wants to work on next. It is intended to serve as a guideline for others for information, feedback and possible participation. -HTTP/3 ------- - - See the [QUIC and HTTP/3 wiki page](https://github.com/curl/curl/wiki/QUIC). - -ESNI (Encrypted SNI) --------------------- - - See Daniel's post on [Support of Encrypted - SNI](https://curl.haxx.se/mail/lib-2019-03/0000.html) on the mailing list. 
- HSTS ---- -Complete and merge [the existing PR](https://github.com/curl/curl/pull/2682). - -Parallel transfers for the curl tool ------------------------------------- - -This will require several new command line options to enable and control. - - 1. switch to creating a list of all the transfers first before any transfer - is done - 2. make the transfers using the multi interface - 3. optionally fire up more transfers before the previous has completed + Merge [the existing PR](https://github.com/curl/curl/pull/5896). -Option to refuse HTTPS => HTTP redirects ----------------------------------------- +ECH (Encrypted Client Hello - formerly known as ESNI) +----------------------------------------------------- -Possibly as a new bit to `CURLOPT_FOLLOWLOCATION` ? - -Option to let CURLOPT_CUSTOMREQUEST be overridden on redirect -------------------------------------------------------------- - -(This is a common problem for people using `-X` and `-L` together.) - -Possibly as a new bit to `CURLOPT_FOLLOWLOCATION` ? - -Hardcode “localhost” --------------------- - -No need to resolve it. Avoid a risk where this is resolved over the network -and actually responds with something else than a local address. Some operating -systems already do this. Also: -https://tools.ietf.org/html/draft-ietf-dnsop-let-localhost-be-localhost-02 - -Consider "menu config"-style build feature selection ----------------------------------------------------- + See Daniel's post on [Support of Encrypted + SNI](https://curl.haxx.se/mail/lib-2019-03/0000.html) on the mailing list. -Allow easier building of custom libcurl versions with only a selected feature -where the available features are easily browsable and toggle-able ON/OFF or -similar. + Initial work exists in https://github.com/curl/curl/pull/4011 diff --git a/docs/SECURITY-PROCESS.md b/docs/SECURITY-PROCESS.md index 6cae5036b43092..d2ac1fd8461a96 100644 --- a/docs/SECURITY-PROCESS.md +++ b/docs/SECURITY-PROCESS.md @@ -8,11 +8,10 @@ Publishing Information ---------------------- All known and public curl or libcurl related vulnerabilities are listed on -[the curl web site security page](https://curl.haxx.se/docs/security.html). +[the curl website security page](https://curl.haxx.se/docs/security.html). -Security vulnerabilities should not be entered in the project's public bug -tracker unless the necessary configuration is in place to limit access to the -issue to only the reporter and the project's security team. +Security vulnerabilities **should not** be entered in the project's public bug +tracker. Vulnerability Handling ---------------------- @@ -23,20 +22,20 @@ No information should be made public about a vulnerability until it is formally announced at the end of this process. That means, for example that a bug tracker entry must NOT be created to track the issue since that will make the issue public and it should not be discussed on any of the project's public -mailing lists. Also messages associated with any commits should not make -any reference to the security nature of the commit if done prior to the public +mailing lists. Also messages associated with any commits should not make any +reference to the security nature of the commit if done prior to the public announcement. -- The person discovering the issue, the reporter, reports the vulnerability - privately to `curl-security@haxx.se`. That's an email alias that reaches a - handful of selected and trusted people. 
+- The person discovering the issue, the reporter, reports the vulnerability on + [https://hackerone.com/curl](https://hackerone.com/curl). Issues filed there + reach a handful of selected and trusted people. - Messages that do not relate to the reporting or managing of an undisclosed security vulnerability in curl or libcurl are ignored and no further action is required. -- A person in the security team sends an e-mail to the original reporter to - acknowledge the report. +- A person in the security team responds to the original report to acknowledge + that a human has seen the report. - The security team investigates the report and either rejects it or accepts it. @@ -51,9 +50,9 @@ announcement. should involve the reporter as much as possible. - The release of the information should be "as soon as possible" and is most - often synced with an upcoming release that contains the fix. If the - reporter, or anyone else, thinks the next planned release is too far away - then a separate earlier release for security reasons should be considered. + often synchronized with an upcoming release that contains the fix. If the + reporter, or anyone else involved, thinks the next planned release is too + far away, then a separate earlier release should be considered. - Write a security advisory draft about the problem that explains what the problem is, its impact, which versions it affects, solutions or workarounds, @@ -61,12 +60,14 @@ announcement. Figure out the CWE (Common Weakness Enumeration) number for the flaw. - Request a CVE number from + [HackerOne](https://docs.hackerone.com/programs/cve-requests.html) + +- Consider informing [distros@openwall](https://oss-security.openwall.org/wiki/mailing-lists/distros) - when also informing and preparing them for the upcoming public security - vulnerability announcement - attach the advisory draft for information. Note - that 'distros' won't accept an embargo longer than 14 days and they do not - care for Windows-specific flaws. For windows-specific flaws, request CVE - directly from MITRE. + to prepare them about the upcoming public security vulnerability + announcement - attach the advisory draft for information. Note that + 'distros' won't accept an embargo longer than 14 days and they do not care + for Windows-specific flaws. - Update the "security advisory" with the CVE number. @@ -87,12 +88,15 @@ announcement. the same manner we always announce releases. It gets sent to the curl-announce, curl-library and curl-users mailing lists. -- The security web page on the web site should get the new vulnerability +- The security web page on the website should get the new vulnerability mentioned. curl-security (at haxx dot se) ------------------------------ +This is a private mailing list for discussions on and about curl security +issues. + Who is on this list? There are a couple of criteria you must meet, and then we might ask you to join the list or you can ask to join it. It really isn't very formal. We basically only require that you have a long-term presence in the @@ -121,15 +125,8 @@ Publishing Security Advisories 6. On security advisory release day, push the changes on the curl-www repository's remote master branch. -Hackerone Internet Bug Bounty ------------------------------ - -The curl project does not run any bounty program on its own, but there are -outside organizations that do. First report your issue the normal way and -proceed as described in this document. 
- -Then, if the issue is [critical](https://hackerone.com/ibb-data), you are -eligible to apply for a bounty from Hackerone for your find. +Bug Bounty +---------- -Once your reported vulnerability has been publicly disclosed by the curl -project, you can submit a [report to them](https://hackerone.com/ibb-data). \ No newline at end of file +See [BUG-BOUNTY](https://curl.haxx.se/docs/bugbounty.html) for details on the +bug bounty program. diff --git a/docs/SSL-PROBLEMS.md b/docs/SSL-PROBLEMS.md index 91803e22dd5dce..35000cf765c1a6 100644 --- a/docs/SSL-PROBLEMS.md +++ b/docs/SSL-PROBLEMS.md @@ -53,9 +53,9 @@ Note that these weak ciphers are identified as flawed. For example, this includes symmetric ciphers with less than 128 bit keys and RC4. - WinSSL in Windows XP is not able to connect to servers that no longer + Schannel in Windows XP is not able to connect to servers that no longer support the legacy handshakes and algorithms used by those versions, so we - advice against building curl to use WinSSL on really old Windows versions. + advice against building curl to use Schannel on really old Windows versions. References: @@ -77,9 +77,9 @@ Some SSL backends may do certificate revocation checks (CRL, OCSP, etc) depending on the OS or build configuration. The --ssl-no-revoke option was introduced in 7.44.0 to disable revocation checking but currently is only - supported for WinSSL (the native Windows SSL library), with an exception in - the case of Windows' Untrusted Publishers blacklist which it seems can't be - bypassed. This option may have broader support to accommodate other SSL + supported for Schannel (the native Windows SSL library), with an exception + in the case of Windows' Untrusted Publishers block list which it seems can't + be bypassed. This option may have broader support to accommodate other SSL backends in the future. References: diff --git a/docs/SSLCERTS.md b/docs/SSLCERTS.md index 2c5be68e6ae4c2..c991f67dd12d0d 100644 --- a/docs/SSLCERTS.md +++ b/docs/SSLCERTS.md @@ -14,7 +14,7 @@ If libcurl was built with Schannel or Secure Transport support (the native SSL libraries included in Windows and Mac OS X), then this does not apply to you. Scroll down for details on how the OS-native engines handle SSL certificates. If you're not sure, then run "curl -V" and read the results. If -the version string says "WinSSL" in it, then it was built with Schannel +the version string says `Schannel` in it, then it was built with Schannel support. It is about trust @@ -55,13 +55,13 @@ server, do one of the following: 2. Get a CA certificate that can verify the remote server and use the proper option to point out this CA cert for verification when connecting. For - libcurl hackers: `curl_easy_setopt(curl, CURLOPT_CAPATH, capath);` + libcurl hackers: `curl_easy_setopt(curl, CURLOPT_CAINFO, cacert);` With the curl command line tool: --cacert [file] 3. Add the CA cert for your server to the existing default CA certificate - store. The default CA certificate store can changed at compile time with the - following configure options: + store. The default CA certificate store can be changed at compile time with + the following configure options: --with-ca-bundle=FILE: use the specified file as CA certificate store. CA certificates need to be concatenated in PEM format into this file. @@ -104,7 +104,7 @@ server, do one of the following: the security is no better than the way you obtained the certificate. 4. 
If you're using the curl command line tool, you can specify your own CA - cert path by setting the environment variable `CURL_CA_BUNDLE` to the path + cert file by setting the environment variable `CURL_CA_BUNDLE` to the path of your choice. If you're using the curl command line tool on Windows, curl will search diff --git a/docs/THANKS b/docs/THANKS index bf6ad755c212cc..9e037eecf10d70 100644 --- a/docs/THANKS +++ b/docs/THANKS @@ -4,13 +4,16 @@ If you have contributed but are missing here, please let us know! -"Captain Basil" -"Spoon Man" +1ocalhost on github +3dyd on github Aaro Koskinen Aaron Oneal Aaron Orenstein Aaron Scarisbrick +aasivov on github Abram Pousada +accountantM on github +AceCrow on Github Adam Barclay Adam Brown Adam Coyne @@ -21,16 +24,21 @@ Adam Marcionek Adam Piggott Adam Sampson Adam Tkac +Adnan Khan +adnn on github Adrian Burcea Adrian Peniak Adrian Schuur Adriano Meirelles +afrind on github +ahodesuka on github Ajit Dhumale Akhil Kedia Aki Koskinen Akos Pasztory Akshay Vernekar Alain Danteny +Alain Miniussi Alan Jenkins Alan Pinstein Albert Chin-A-Young @@ -43,20 +51,26 @@ Ales Mlakar Ales Novak Alessandro Ghedini Alessandro Vesely +Alex aka WindEagle Alex Baines Alex Bligh Alex Chan Alex Fishman +Alex Gaynor +Alex Grebenschikov Alex Gruz +Alex Kiernan +Alex Konev Alex Malinovich +Alex Mayorga Alex McLellan Alex Neblett Alex Nichols Alex Potapenko Alex Rousskov +Alex Samorukov Alex Suykov Alex Vinnik -Alex aka WindEagle Alexander Beedie Alexander Dyagilev Alexander Elgert @@ -68,7 +82,9 @@ Alexander Pepper Alexander Peslyak Alexander Sinditskiy Alexander Traud +Alexander V. Tikhonov Alexander Zhuravlev +Alexandre Pion Alexey Borzov Alexey Eremikhin Alexey Melnichuk @@ -81,19 +97,24 @@ Alfonso Martone Alfred Gebert Allen Pulsifer Alona Rossen +amishmm on github +Amit Katyal Amol Pattekar Amr Shahin Anatol Belski Anatoli Tubman Anders Bakken +Anders Berg Anders Gustafsson Anders Havn Anders Roxell +Anderson Sasaki Anderson Toshiyuki Sasaki Andi Jahja Andre Guibert de Bruet Andre Heinecke Andreas Damm +Andreas Falkenhahn Andreas Farber Andreas Kostyrka Andreas Malzahn @@ -111,27 +132,36 @@ Andrei Karas Andrei Kurushin Andrei Neculau Andrei Sedoi +Andrei Valeriu BICA Andrei Virtosu Andrej E Baranov +Andrew Barnes Andrew Benham Andrew Biggs Andrew Bushnell +Andrew de los Reyes Andrew Francis Andrew Fuller +Andrew Ishchuk Andrew Krieger Andrew Kurushin Andrew Lambert Andrew Moise +Andrew Potter Andrew Robbins Andrew Wansink -Andrew de los Reyes Andrey Labunets Andrii Moiseiev +Andrius Merkys Andrés García Andy Cedilnik +Andy Fiddaman Andy Serpa Andy Tsouladze Angus Mackay +anio on github +anshnd on github +Antarpreet Singh Anthon Pang Anthony Avina Anthony Bryan @@ -147,6 +177,7 @@ Antoni Villalonga Antonio Larrosa Antony74 on github Antti Hätälä +arainchik on github Archangel_SDY on github Arkadiusz Miskiewicz Armel Asselin @@ -154,27 +185,38 @@ Arnaud Compan Arnaud Ebalard Arnaud Rebillout Aron Bergman +Aron Rotteveel Artak Galoyan Arthur Murray Arve Knudsen Arvid Norberg +asavah on github Ashish Shukla +Ashwin Metpalli Ask Bjørn Hansen Askar Safin Ates Goral Augustus Saunders +Austin Green Avery Fay Axel Tillequin Ayoub Boudhar Balaji Parasuram Balaji S Rao Balaji Salunke +Balazs Kovacsics Balint Szilakszi Barry Abrahamson +Barry Pollard Bart Whiteley +Baruch Siach Bas Mevissen Bas van Schaik +Bastien Bouclet Basuke Suzuki +baumanj on github +bdry on github +beckenc on github Ben Boeckel Ben Darnell Ben Greear @@ -182,6 +224,7 @@ Ben Kohler Ben Madsen Ben Noordhuis 
Ben Van Hof +Ben Voris Ben Winslow Benbuck Nason Benjamin Gerard @@ -202,20 +245,26 @@ Bernhard Walle Bert Huijben Bertrand Demiddelaer Bertrand Simonnet +Bevan Weiss Bill Doyle Bill Egert Bill Hoffman Bill Middlecamp Bill Nagel Bill Pyne +Billyzou0741326 on github +Bjarni Ingi Gislason +Bjoern Franke Bjoern Sikora Bjorn Augustsson Bjorn Reese Björn Stenberg Blaise Potard +bnfp on github Bob Relyea Bob Richmond Bob Schader +bobmitchell1956 on github Bogdan Nicula Brad Burdick Brad Fitzpatrick @@ -224,17 +273,21 @@ Brad Hards Brad King Brad Spencer Bradford Bruce +bramus on github Brandon Casey +Brandon Dong Brandon Wang Brendan Jurd Brent Beardsley Brian Akins +Brian Bergeron Brian Carpenter Brian Chaplin Brian Childs Brian Chrisman Brian Dessent Brian E. Gallew +Brian Inglis Brian J. Murrell Brian Prodoehl Brian R Duffy @@ -243,30 +296,46 @@ Brock Noland Bru Rom Bruce Mitchener Bruce Stephens +BrumBrum on hackerone +Bruno de Carvalho Bruno Grasselli Bruno Thomsen -Bruno de Carvalho Bryan Henderson Bryan Kemp +bsammon on github +buzo-ffm on github +bxac on github +Bylon2 on github Byrial Jensen +Caleb Raitto +Calvin Buckley +Cameron Cawley Cameron Kaiser Cameron MacMinn Camille Moncelier Caolan McNamara +Captain Basil Carie Pointer Carlo Cannas +Carlo Marcelo Arenas Belón Carlo Teubner Carlo Wood +Carlos ORyan Carsten Lange Casey O'Donnell Catalin Patulea +causal-agent on github +cbartl on github +cclauss on github Chad Monroe Chandrakant Bagul Charles Kerr Charles Romestant Chen Prog +Cherish98 on github Chester Liu Chih-Chung Chang +Chih-Hsuan Yen Chris "Bob Bob" Chris Araman Chris Carlmar @@ -278,6 +347,8 @@ Chris Flerackers Chris Gaukroger Chris Maltby Chris Mumford +Chris Paulson-Ellis +Chris Roberts Chris Smowton Chris Young Christian Fillion @@ -291,23 +362,34 @@ Christian Schmitz Christian Stewart Christian Vogt Christian Weisgerber +Christoph Krey +Christoph M. Becker Christophe Demory +Christophe Dervieux Christophe Legry Christopher Conroy Christopher Head Christopher Palow Christopher R. Palmer +Christopher Reid Christopher Stone Chungtsun Li Ciprian Badescu Claes Jakobsson Clarence Gardner Claudio Neves +clbr on github Clemens Gruber +Cliff Crosland Clifford Wolf Clint Clayton +Clément Notin +cmfrolick on github +codesniffer13 on github Cody Jones Cody Mack +COFFEETALES on github +coinhubs on github Colby Ranger Colin Blair Colin Hogben @@ -318,20 +400,27 @@ Cory Benfield Cory Nelson Costya Shulyupin Craig A West +Craig Andrews Craig Davison -Craig Markwardt Craig de Stigter +Craig Markwardt +crazydef on github Cris Bailiff +Cristian Greco Cristian Rodríguez Curt Bogmine +Cynthia Coan Cyril B Cyrill Osterwalder Cédric Connes Cédric Deltheil D. Flinkmann +d912e3 on github Da-Yoon Chung +daboul on github Dag Ekengren Dagobert Michelsen +Daiki Ueno Dair Grant Dambaev Alexander Damian Dixon @@ -343,12 +432,14 @@ Dan Donahue Dan Fandrich Dan Jacobson Dan Johnson +Dan Kenigsberg Dan Locks Dan McNulty Dan Nelson Dan Petitt Dan Torop Dan Zitter +Daniel at touchtunes Daniel Bankhead Daniel Black Daniel Cater @@ -361,22 +452,24 @@ Daniel Kahn Gillmor Daniel Krügler Daniel Lee Hwang Daniel Lublin +Daniel Marjamäki Daniel Melani Daniel Mentz Daniel Romero Daniel Schauenberg Daniel Seither Daniel Shahaf +Daniel Silverstone Daniel Steinberg Daniel Stenberg Daniel Theron -Daniel at touchtunes Daphne Luong Dario Nieuwenhuis Dario Weißer Darryl House Darshan Mody Darío Hereñú +dasimx on github Dave Dribin Dave Halbakken Dave Hamilton @@ -404,6 +497,7 @@ David Kimdon David L. 
David Lang David LeBlanc +David Lopes David Lord David McCreedy David Odin @@ -420,12 +514,19 @@ David Walser David Woodhouse David Wright David Yan +davidedec on github +dbrowndan on github Dengminwen +Denis Baručić +Denis Chaplygin Denis Feklushkin +Denis Goleshchikhin Denis Ollier Dennis Clarke +Dennis Felsing Derek Higgins Desmond O. Chang +destman on github Detlef Schmier Dheeraj Sangamkar Didier Brisebourg @@ -444,9 +545,15 @@ Dinar Dirk Eddelbuettel Dirk Feytons Dirk Manske +Dirkjan Bussink +Diven Qi +divinity76 on github +dkjjr89 on github +dkwolfe4 on github Dmitri Shubin Dmitri Tikhonov Dmitriy Sergeyev +dmitrmax on github Dmitry Bartsevich Dmitry Eremin-Solenikov Dmitry Falko @@ -456,6 +563,7 @@ Dmitry Mikhirev Dmitry Popov Dmitry Rechkin Dmitry S. Baikov +dnivras on github Dolbneff A.V Domenico Andreoli Dominick Meglio @@ -473,9 +581,12 @@ Douglas Mencken Douglas R. Horner Douglas Steinwand Dov Murik +dpull on github Drake Arconis +dtmsecurity on github Duane Cathey Duncan Mac-Vicar Prett +Duncan Wilcox Dustin Boswell Dusty Mabe Duy Phan Thanh @@ -488,7 +599,9 @@ Earnestly on github Eason-Yu on github Ebenezer Ikonne Ed Morley +Edgaras Janušauskas Edin Kadribasic +Edmond Yu Eduard Bloch Edward Kimmel Edward Rudd @@ -497,12 +610,18 @@ Edward Thomson Eelco Dolstra Eetu Ojanen Egon Eckert +Ehren Bendler Eldar Zaitov +elelel on github +elephoenix on github +Eli Schwartz Elia Tufarolo Elliot Saba Ellis Pritchard Elmira A Semenova +elsamuko on github Emanuele Bovisio +Emil Engler Emil Lerner Emil Romanus Emiliano Ida @@ -524,9 +643,11 @@ Eric Rescorla Eric Ridge Eric Rosenquist Eric S. Raymond +Eric Sauvageau Eric Thelin Eric Vergnaud Eric Wong +Eric Wu Eric Young Erick Nuwendam Erik Jacobsen @@ -537,6 +658,7 @@ Ernest Beinrohr Ernst Sjöstrand Erwan Legrand Erwin Authried +Estanislau Augé-Pujadas Ethan Glasser Camp Etienne Simard Eugene Kotlyarov @@ -545,7 +667,9 @@ Even Rouault Evert Pot Evgeny Grin Evgeny Turnaev +eXeC64 on github Eygene Ryabinkin +Eylem Ugurel Fabian Frank Fabian Hiernaux Fabian Keil @@ -553,15 +677,20 @@ Fabian Ruff Fabrice Fontaine Fabrizio Ammollo Fahim Chandurwala +Faizur Rahman +fds242 on github Federico Bianchi Fedor Karpelevitch +Fedor Korotkov Feist Josselin +Felipe Gasper Felix Hädicke Felix Kaiser -Felix Yan Felix von Leitner +Felix Yan Feng Tu Fernando Muñoz +Filip Salomonsson Flavio Medeiros Florian Pritz Florian Schoppmann @@ -571,6 +700,7 @@ Forrest Cahoon Francisco Moraes Francisco Sedano Francois Petitjean +Francois Rivard Frank Denis Frank Gevaerts Frank Hempel @@ -581,13 +711,17 @@ Frank Ticheler Frank Van Uffelen František Kučera François Charlier +François Rigault Fred Machado Fred New Fred Noz Fred Stluka Frederic Lepied Frederik B +Frederik Wedel-Heinen Fredrik Thulin +FuccDucc on github +fullincome on github Gabriel Kuri Gabriel Sjoberg Garrett Holmstrom @@ -595,6 +729,7 @@ Gary Maxwell Gaurav Malhotra Gautam Kachroo Gautam Mani +Gavin Wong Gavrie Philipson Gaz Iqbal Gaël Portay @@ -604,6 +739,7 @@ Georg Horn Georg Huettenegger Georg Lippitsch Georg Wicherski +George Liu Gerd v. Egidy Gergely Nagy Gerhard Herre @@ -615,6 +751,7 @@ Gil Weber Gilad Gilbert Ramirez Jr. 
Gilles Blanc +Gilles Vollant Giorgos Oikonomou Gisle Vanem GitYuanQu on github @@ -637,22 +774,28 @@ Greg Onufer Greg Pratt Greg Rowe Greg Zavertnik +Gregory Jefferis Gregory Nicholls Gregory Szorc +Griffin Downs Grigory Entin Guenole Bescon Guido Berhoerster Guillaume Arluison +guitared on github Gunter Knauf Gustaf Hui Gustavo Grieco +Guy Poizat GwanYeong Kim Gwenole Beauchesne Gökhan Şengün Götz Babin-Ebell +H3RSKO on github Hagai Auro Haibo Huang Hamish Mackenzie +hamstergene on github Han Han Han Qiao Hang Kin Lau @@ -662,6 +805,7 @@ Hanno Böck Hanno Kranzhoff Hans Steegers Hans-Jurgen May +Hao Wu Hardeep Singh Haris Okanovic Harold Stuart @@ -688,9 +832,15 @@ Hoi-Ho Chan Hongli Lai Howard Blaise Howard Chu +hsiao yi +htasta on github Hubert Kario +Hugo van Kemenade Huzaifa Sidhpurwala +huzunhao on github +hydra3333 on github Hzhijun +iammrtau on github Ian D Allen Ian Fette Ian Ford @@ -701,32 +851,44 @@ Ian Wilkes Ignacio Vazquez-Abrams Igor Franchuk Igor Khristophorov +Igor Makarov Igor Novoseltsev Igor Polyakov Ihor Karpenko +ihsinme on github Iida Yosiaki Ilguiz Latypov Ilja van Sprundel +Ilya Kosarev +imilli on github Immanuel Gregoire Inca R +infinnovation-dev on github Ingmar Runge Ingo Ralf Blum Ingo Wilken +Ionuț-Francisc Oancea Irfan Adilovic +Ironbars13 on github Irving Wolfe Isaac Boukris +Isaiah Norton Ishan SinghLevett Ithubg on github Ivan Avdeev +IvanoG on github Ivo Bellin Salarin +iz8mbw on github Jack Zhang Jackarain on github Jacky Lam +Jacob Barthelmeh Jacob Meuser Jacob Moshenko Jactry Zeng Jad Chamcham Jaime Fullaondo +jakirkham on github Jakub Wilk Jakub Zakrzewski James Atwill @@ -736,36 +898,43 @@ James Cheng James Clancy James Cone James Dury +James Fuller James Gallagher James Griffiths James Housley James Knight +James Le Cuirot James MacMillan James Slaughter Jamie Lokier Jamie Newton Jamie Wilkinson Jan Alexander Steffens +Jan Chren Jan Ehrhardt Jan Koen Annot Jan Kunder Jan Schaumann Jan Schmidt Jan Van Boghout +JanB on github Janne Johansson Jared Jennings Jared Lundell Jari Aalto Jari Sundell +jasal82 on github Jason Baietto Jason Glasgow Jason Juang +Jason Lee Jason Liu Jason McDonald Jason S. Priebe Javier Barroso Javier Blazquez Javier G. Sogo +Javier Navarro Javier Sixto Jay Austin Jayesh A Shah @@ -785,16 +954,20 @@ Jeff Hodges Jeff Johnson Jeff King Jeff Lawson +Jeff Mears Jeff Phillips Jeff Pohlmeyer Jeff Weber Jeffrey Walton +Jens Finkhaeuser Jens Rantil Jens Schleusener Jeremie Rapin Jeremy Friesner Jeremy Huddleston +Jeremy Lainé Jeremy Lin +Jeremy Maitin-Shepard Jeremy Pearson Jeremy Tan Jeroen Koekkoek @@ -809,22 +982,27 @@ Jesper Jensen Jesse Chisholm Jesse Noller Jesse Tan +jethrogb on github Jie He Jim Drash Jim Freeman Jim Fuller Jim Hollinger Jim Meyering +Jimmy Gaussen Jiri Dvorak Jiri Hruska Jiri Jaburek Jiří Malák +jmdavitt on github +jnbr on github Jocelyn Jaubert Joe Halpin Joe Malicki Joe Mason Joel Chen Joel Depooter +joey-l-us on github Jofell Gallardo Johan Anderson Johan Lantz @@ -857,6 +1035,8 @@ John Marino John Marshall John McGowan John P. McCaskey +John Schroeder +John Simpson John Starks John Suprock John V. 
Chow @@ -865,9 +1045,12 @@ John Weismiller John Wilkinson John-Mark Bell Johnny Luong +Jojojov on github Jon DeVree Jon Grubbs +Jon Johnson Jr Jon Nelson +Jon Rumsey Jon Sargeant Jon Seymour Jon Spencer @@ -877,27 +1060,35 @@ Jon Turner Jonas Forsman Jonas Minnberg Jonas Schnelli +Jonas Vautherin Jonatan Lander Jonatan Vela Jonathan Cardoso Machado Jonathan Hseu +Jonathan Moerman Jonathan Nieder Jongki Suwandi +jonrumsey on github +Joombalaya on github Joonas Kuorilehto Jose Alf Jose Kahan Josef Wolf Josh Bialkowski Josh Kapell +joshhe on github Joshua Kwan Joshua Swink +Josie Huddleston Josue Andrade Gomes Jozef Kralik +JP Mens Juan Barreto Juan F. Codagnone Juan Ignacio Hervás Juan RP Judson Bishop +Juergen Hoetzel Juergen Wilke Jukka Pihl Julian Noble @@ -909,6 +1100,8 @@ Julien Chaffraix Julien Nabet Julien Royer Jun-ichiro itojun Hagino +jungle-boogie on github +Junho Choi Jurij Smakov Juro Bystricky Justin Clift @@ -916,19 +1109,24 @@ Justin Ehlert Justin Fletcher Justin Karneges Justin Maggard +jveazey on github +jzinn on github János Fekete Jérémy Rocher Jörg Mueller-Tolk Jörn Hartroth K. R. Walker +ka7 on github Kai Engert Kai Noda Kai Sommerfeld Kai-Uwe Rommel Kalle Vahlman Kamil Dudka +Kane York Kang Lin Kang-Jin Lee +Karl Chen Karl Moerder Karol Pietrzak Kartik Mahajan @@ -941,6 +1139,7 @@ Kees Dekker Keith MacDonald Keith McGuigan Keith Mok +Ken Brown Ken Hirsch Ken Rastatter Kenny To @@ -964,21 +1163,36 @@ Kjetil Jacobsen Klaus Stein Klevtsov Vadim Kobi Gurkan +Koen Dergent Konstantin Isakov Konstantin Kushnir +kotoriのねこ +kouzhudong on github +kreshano on github Kris Kennaway Krishnendu Majumdar Krister Johansen Kristian Gunstone Kristian Köhntopp +Kristian Mide Kristiyan Tsaklev +Kristoffer Gleditsch +Kunal Chandarana +Kunal Ekawde Kurt Fankhauser +Kwon-Young Choi +Kyle Abramowitz +Kyle Edwards Kyle J. McKay Kyle L. Huff Kyle Sallee +Kyohei Kadota Kyselgov E.N +l00p3r on Hackerone Lachlan O'Dea Ladar Levison +Lance Ware +Laramie Leavitt Larry Campbell Larry Fahnoe Larry Lin @@ -1005,15 +1219,19 @@ Len Krause Len Marinaccio Lenaic Lefever Lenny Rachitsky +Leo Neat Leon Breedt Leon Winter Leonardo Rosati Leonardo Taccari Liam Healy +lijian996 on github Lijo Antony +lilongyan-huawei on github Linas Vepstas Lindley French Ling Thio +Linos Giannopoulos Linus Lewandowski Linus Nielsen Feltzing Linus Nordberg @@ -1031,11 +1249,14 @@ Luca Altea Luca Boccassi Lucas Adamski Lucas Pardue +Lucas Severo +Lucien Zürcher Ludek Finstrle Ludovico Cavedon Ludwig Nussel Lukas Ruzicka Lukasz Czekierda +lukaszgn on github Luke Amery Luke Call Luke Dashjr @@ -1045,10 +1266,11 @@ Luz Paz Luật Nguyễn Lyman Epp Lyndon Hill -MAntoniak on github +M.R.T on github Maciej Karpiuk Maciej Puzio Maciej W. Rozycki +madblobfish on github Mahmoud Samir Fayed Maks Naumov Maksim Kuzevanov @@ -1057,6 +1279,7 @@ Mamoru Tasaka Mamta Upadhyay Mandy Wu Manfred Schwarb +MAntoniak on github Manuel Massing Marc Aldorasi Marc Boucher @@ -1068,6 +1291,8 @@ Marc Kleine-Budde Marc Renault Marc Schlatter Marc-Antoine Perennou +marc-groundctl on github +Marcel Hernandez Marcel Raad Marcel Roelofs Marcelo Echeverria @@ -1103,15 +1328,19 @@ Markus Elfring Markus Koetter Markus Moeller Markus Oberhumer +Markus Olsson Markus Westerlind +Maros Priputen Marquis de Muesli Martijn Koster Martin Ankerl +Martin Bašti Martin C. 
Martin Martin Drasar Martin Dreher Martin Frodl Martin Galvan +Martin Gartner Martin Hager Martin Hedenfalk Martin Jansen @@ -1121,13 +1350,18 @@ Martin Lemke Martin Skinner Martin Staael Martin Storsjö +Martin V Martin Vejnár Marty Kuhrt Maruko +Masaya Suzuki +masbug on github +Massimiliano Fantuzzi Massimiliano Ziccardi Massimo Callegari Mateusz Loskot Mathias Axelsson +Mathias Gumz Mathieu Legare Mats Lidell Matt Arsenault @@ -1146,6 +1380,7 @@ Matthew Hall Matthew Kerwin Matthew Whitehead Matthias Bolte +Matthias Naegler Mattias Fornander Matus Uzak Maurice Barnum @@ -1153,24 +1388,32 @@ Mauro Iorio Mauro Rappa Max Dymond Max Katsev +Max Kellermann Max Khon +Max Peal Max Savenkov Maxim Ivanov Maxim Perenesenko Maxim Prohorov Maxime Larocque Maxime Legros +mbeifuss on github +mccormickt12 on github Mehmet Bozkurt Mekonikum Melissa Mears +Mert Yazıcıoğlu Mettgut Jamalla Michael Anti +Michael Baentsch Michael Benedict +Michael Brehm Michael Calmer Michael Cronenworth Michael Curtis Michael Day Michael Felt +Michael Forney Michael Gmelin Michael Goffioul Michael Jahn @@ -1180,19 +1423,24 @@ Michael Kaufmann Michael Kilburn Michael Kujawa Michael König +Michael Lee Michael Maltese Michael Mealling Michael Mueller +Michael Musset +Michael Olbrich Michael Osipov Michael Schmid Michael Smith Michael Stapelberg Michael Steuer Michael Stillwell +Michael Vittiglio Michael Wallner Michal Bonino Michal Marek Michal Trybus +Michal Čaplygin Michał Antoniak Michał Fita Michał Górny @@ -1203,6 +1451,7 @@ Michel Promonet Michele Bini Miguel Angel Miguel Diaz +migueljcrum on github Mihai Ionescu Mikael Johansson Mikael Sennerholm @@ -1211,43 +1460,65 @@ Mike Bytnar Mike Crowe Mike Dobbs Mike Dowell +Mike Frysinger Mike Giancola Mike Hasselberg Mike Henshaw Mike Hommey Mike Mio +Mike Norton Mike Power Mike Protts Mike Revi Miklos Nemeth Miloš Ljumović Mingliang Zhu +Mingtao Yang Miroslav Franc Miroslav Spousta +Mischa Salle Mitz Wark +mkzero on github Mohamed Lrhazi +Mohamed Osama Mohammad AlSaleh +Mohammad Hasbini Mohun Biswas +momala454 on github +moohoorama on github Mostyn Bramley-Moore Moti Avrahami +MrdUkk on github +MrSorcus on github +Muhammad Herdiansyah +Murugan Balraj Muz Dima Myk Taylor -NTMan on Github Nach M. S. Nagai H +naost3rn on github Nate Prewitt Nathan Coulter Nathan O'Sullivan Nathanael Nerode +Nathaniel J. Smith +Nathaniel R. 
Lewis Nathaniel Waisbrot Naveen Chandran Naveen Noel Neal Poole +nedres on github +neex on github Nehal J Wani +neheb on github Neil Bowers Neil Dunbar Neil Kolban Neil Spring +nevv on HackerOne/curl +Niall O'Reilly +niallor on github +nianxuejie on github Nic Roets Nicholas Maniscalco Nick Draffen @@ -1257,26 +1528,35 @@ Nick Miyake Nick Zitzmann Nicklas Avén Nico Baggus +nico-abram on github Nicolas Berloquin Nicolas Croiset Nicolas François Nicolas Grekas +Nicolas Guillier Nicolas Morey-Chaisemartin +Nicolas Sterchele Niels van Tongeren Nikita Schmidt Nikitinskit Dmitriy Niklas Angebrand +Niklas Hambüchen Nikolai Kondrashov Nikos Mavrogiannopoulos Nikos Tsipinakis +niner on github Ning Dong Nir Soffer Nis Jorgensen +nk +NobodyXu on github Nobuhiro Ban Nodak Sodak +nopjmp on github Norbert Frese Norbert Kett Norbert Novotny +NTMan on Github Octavio Schroeder Ofer Okhin Vasilij @@ -1284,6 +1564,8 @@ Ola Mork Olaf Flebbe Olaf Stüben Oleg Pudeyev +Olen Andoni +olesteban on github Oli Kingshott Oliver Gondža Oliver Graute @@ -1291,17 +1573,22 @@ Oliver Kuckertz Oliver Schindler Olivier Berger Olivier Brunel +Omar Ramadan +omau on github Orange Tsai Oren Souroujon Oren Tirosh Orgad Shaneh Ori Avtalion +osabc on github Oscar Koeroo Oscar Norlander Oskar Liljeblad Oumph on github +ovidiu-benea on github P R Schaffner Palo Markovic +Paolo Mossino Paolo Piacentini Paras Sethia Pascal Gaudette @@ -1309,6 +1596,8 @@ Pascal Terjan Pasha Kuznetsov Pasi Karkkainen Pat Ray +patelvivekv1993 on github +patnyb on github Patrice Guerin Patricia Muscalu Patrick Bihan-Faou @@ -1322,10 +1611,13 @@ Patrick Smith Patrick Watson Patrik Thunstrom Pau Garcia i Quiles +Paul B. Omta Paul Donohue +Paul Dreik Paul Groke Paul Harrington Paul Harris +Paul Hoffman Paul Howarth Paul Joyce Paul Marks @@ -1335,6 +1627,8 @@ Paul Nolan Paul Oliver Paul Querna Paul Saab +Paul Vixie +Paulo Roberto Tomasi Pavel Cenek Pavel Gushchin Pavel Löbl @@ -1342,11 +1636,14 @@ Pavel Orehov Pavel Pavlov Pavel Raiskup Pavel Rochnyak +Pavel Volgarev Pavol Markovic Pawel A. Gajda Pawel Kierski Pedro Larroy +Pedro Monreal Pedro Neves +pendrek at hackerone Peng Li Per Lundberg Per Malmberg @@ -1358,6 +1655,7 @@ Peter Gal Peter Heuchert Peter Hjalmarsson Peter Korsgaard +Peter Körner Peter Lamare Peter Lamberg Peter Laser @@ -1365,7 +1663,9 @@ Peter O'Gorman Peter Pentchev Peter Piekarski Peter Silva +Peter Simonyi Peter Su +Peter Sumatra Peter Sylvester Peter Todd Peter Varga @@ -1389,6 +1689,7 @@ Philip Langdale Philip Prindeville Philipp Waehnert Philippe Hameau +Philippe Marguinaud Philippe Raoult Philippe Vaucher Pierre @@ -1396,18 +1697,27 @@ Pierre Brico Pierre Chapuis Pierre Joye Pierre Ynard +Pierre-Yves Bigourdan Piotr Dobrogost +Piotr Komborski +Po-Chuan Hsieh Pooyan McSporran +Poul T Lomholt Pramod Sharma Prash Dush Praveen Pvs Priyanka Shah Przemysław Tomaszewski +pszemus on github +puckipedia on github Puneet Pawaia +qiandu2006 on github Quagmire Quanah Gibson-Mount +Quentin Balland Quinn Slack R. Dennis Steed +Radoslav Georgiev Radu Simionescu Rafa Muyo Rafael Antonio @@ -1423,16 +1733,21 @@ Rajkumar Mandal Ralf S. Engelschall Ralph Beckmann Ralph Mitchell +Ram Krushna Mishra +ramsay-jones on github Ran Mozes Randall S. 
Becker Randy Armstrong Randy McMurchy Raphael Gozzo +Rasmus Melchior Jacobsen Ravi Pratap Ray Dassen Ray Pekowski Ray Satiro Razvan Cojocaru +rcombs on github +Reed Loden Reinhard Max Reinout van Schouwen Remco van Hooff @@ -1449,14 +1764,18 @@ Rene Rebe Reuven Wachtfogel Reza Arbab Ricardo Cadime +Ricardo Gomes Rich Burridge Rich Gray +Rich Mirch Rich Rauenzahn +Rich Salz Rich Turner Richard Adams Richard Alcock Richard Archer Richard Atterer +Richard Bowker Richard Bramante Richard Clayton Richard Cooper @@ -1470,14 +1789,18 @@ Richard Prescott Richard Silverman Richard van den Berg Richy Kim +Rici Lake Rick Deist Rick Jones Rick Richardson Rick Welykochy +Rickard Hallerbäck Ricki Hirner +Ricky Leverence Ricky-Tigg on github Rider Linden Rikard Falkeborn +rl1987 on github Rob Cotrone Rob Crittenden Rob Davies @@ -1488,6 +1811,7 @@ Rob Ward Robert A. Monat Robert B. Harris Robert D. Young +Robert Dunaj Robert Foreman Robert Iakobashvili Robert Kolcun @@ -1498,6 +1822,7 @@ Robert Schumann Robert Weaver Robert Wruck Robin Cornelius +Robin Douine Robin Johnson Robin Kay Robson Braga Araujo @@ -1507,9 +1832,12 @@ Rodney Simmons Rodric Glaser Rodrigo Silva Roger Leigh +Roger Orr Roland Blom +Roland Hieber Roland Krikava Roland Zimmermann +Rolf Eike Beer Rolland Dudemaine Romain Coltel Romain Fliedel @@ -1522,12 +1850,15 @@ Ron Parker Ron Zapp Ronnie Mose Rosimildo da Silva +Ross Burton +Roy Bellingan Roy Shan Rune Kleveland Ruslan Baratov Ruslan Gazizov Rutger Hofman Ruurd Beerstra +RuurdBeerstra on github Ryan Braud Ryan Chan Ryan Nelson @@ -1537,28 +1868,34 @@ Ryan Winograd Ryuichi KAWAMATA Rémy Léone S. Moonesamy -SBKarr on github Salah-Eddin Shaban +Saleem Abdulrasool Salvador Dávila Salvatore Sorrentino Sam Deane Sam Hurst Sam Roth Sam Schanken +Samanta Navarro Sampo Kellomaki Samuel Díaz García Samuel Listopad +Samuel Marks Samuel Surtees Samuel Thibault +Samuel Tranchet Sander Gates Sandor Feldi Santhana Todatry +Santino Keupp Saqib Ali Sara Golemon Saran Neti Sascha Swiercy Saul good Saurav Babu +sayrer on github +SBKarr on github Scott Bailey Scott Barrett Scott Cantor @@ -1569,12 +1906,14 @@ Sean Burford Sean MacLennan Sean Miller Sebastiaan van Erk +Sebastian Haglund Sebastian Mundry Sebastian Pohlschmidt Sebastian Rasmussen Senthil Raja Velu Sergei Kuzmin Sergei Nikulov +Sergey Ogryzkov Sergey Tatarincev Sergii Kavunenko Sergii Pylypenko @@ -1587,6 +1926,8 @@ Seth Mos Sevan Janiyan Sh Diao Shachaf Ben-Kiki +Shailesh Kapse +Shankar Jadhavar Shao Shuchao Sharad Gupta Shard @@ -1600,22 +1941,34 @@ Shmulik Regev Siddhartha Prakash Jain Sidney San Martín Siegfried Gyuricsko +silveja1 on github +Simon Chalifoux Simon Dick Simon H. Simon Josefsson Simon Legner Simon Liu Simon Warta +Siva Sivaraman +SLDiggie on github +smuellerDD on github +sn on hackerone +sofaboss on github Somnath Kundu Song Ma Sonia Subramanian Spacen Jasset +Spezifant on github Spiridonoff A.V +Spoon Man Spork Schivago +sspiri on github +sstruchtrup on github Stadler Stephan Stan van de Burgt Stanislav Ivochkin Stanislav Zidek +steelman on github Stefan Agner Stefan Bühler Stefan Eissing @@ -1624,21 +1977,26 @@ Stefan Grether Stefan Kanthak Stefan Krause Stefan Neis +Stefan Strogin Stefan Teleman Stefan Tomanek Stefan Ulrich +Stefan Yohansson +Stefano Simonelli Steinar H. 
Gunderson +steini2000 on github Stepan Broz +Stepan Efremov Stephan Bergmann Stephan Lagerholm Stephan Mühlstrasser +Stephan Szabo Stephen Brokenshire Stephen Collyer Stephen Kick Stephen More Stephen Toub Sterling Hughes -Steve Brokenshire Steve Green Steve H Truong Steve Havelka @@ -1654,43 +2012,60 @@ Steven G. Johnson Steven Gu Steven M. Schweda Steven Parkes +Steven Penny +Stian Soiland-Reyes Stoned Elipot +stootill on github Stuart Henderson +SumatraPeter on github Sune Ahlgren +Sunny Bean Sunny Purushe Sven Anders Sven Blumenstein Sven Neuhaus Sven Wegener Svyatoslav Mishyn +swalkaus at yahoo.com Sylvestre Ledru Symeon Paraschoudis Sébastien Willemijns T. Bharath T. Yamada -TJ Saunders +Tadej Vengust Tae Hyoung Ahn Tae Wong +Taiyu Len Taneli Vähäkangas Tanguy Fautre +tarek112 on github Tatsuhiro Tsujikawa +tbugfinder on github Teemu Yli-Elsila Temprimus Terri Oda Terry Wu +thanhchungbtc on github +The Infinnovation team TheAssassin on github Theodore Dubois +therealhirudo on github +tholin on github +Thomas Bouzerar Thomas Braun +Thomas Gamper Thomas Glanzmann Thomas J. Moore Thomas Klausner Thomas L. Shinnick Thomas Lopatic +Thomas M. DuBuisson Thomas Petazzoni Thomas Ruecker Thomas Schwinge Thomas Tonino Thomas van Hesteren +Thomas Vegas Thorsten Schöning Tiit Pikma Till Maas @@ -1704,6 +2079,7 @@ Tim Heckman Tim Mcdonough Tim Newsome Tim Rühsen +Tim Sedlmeyer Tim Sneddon Tim Stack Tim Starling @@ -1714,7 +2090,10 @@ Timotej Lazar Timothe Litt Timothy Polich Tinus van den Berg +TJ Saunders +tmkk on github Tobias Blomberg +Tobias Hieta Tobias Hintze Tobias Lindgren Tobias Markus @@ -1729,6 +2108,7 @@ Todd Vierling Tom Benoist Tom Donovan Tom Grace +Tom Greenslade Tom Lee Tom Mattison Tom Moers @@ -1736,9 +2116,10 @@ Tom Mueller Tom Regner Tom Seddon Tom Sparrow +Tom van der Woerdt Tom Wright Tom Zerucha -Tom van der Woerdt +Tomas Berger Tomas Hoger Tomas Jakobsson Tomas Mlcoch @@ -1749,21 +2130,27 @@ Tomas Tomecek Tomasz Kojm Tomasz Lacki Tommie Gannert +tommink[at]post.pl +Tommy Petty Tommy Tam Ton Voon Toni Moreno Tony Kelman +tonystz on Github Toon Verwaest Tor Arntsen Torben Dannhauer Torsten Foertsch Toshio Kuratomi Toshiyuki Maezawa +tpaukrt on github Traian Nicolescu Travis Burtrum Travis Obenhaus +Trivikram Kamat Troels Walsted Hansen Troy Engel +Tseng Jun Tuomo Rinne Tupone Alfredo Tyler Hall @@ -1774,15 +2161,21 @@ Ulrich Doehner Ulrich Telle Ulrich Zadow Valentin David +Valentyn Korniienko +Valerii Zapodovnikov +vanillajonathan on github Vasiliy Faronov +Vasily Lobaskin Vasy Okhin Venkat Akella Venkataramana Mokkapati Vicente Garcia +Victor Magierski Victor Snezhko Vijay Panghal Vikram Saxena Viktor Szakats +Vilhelm Prytz Ville Skyttä Vilmos Nebehaj Vincas Razma @@ -1791,145 +2184,93 @@ Vincent Le Normand Vincent Penquerc'h Vincent Sanders Vincent Torri +vitaha85 on github Vlad Grachov Vlad Ureche Vladimir Grishchenko Vladimir Kotal Vladimir Lazarenko +Vlastimil Ovčáčík Vojtech Janota Vojtech Minarik Vojtěch Král Volker Schmid Vsevolod Novikov +vshmuk on hackerone +Vyron Tsingaras W. Mark Kubacki Waldek Kozba Walter J. Mack Ward Willats Warren Menzer Wayne Haigh +Wenchao Li Wenxiang Qian Werner Koch +Werner Stolz +wesinator on github Wesley Laxton Wesley Miaw Wez Furlong Wham Bang Wilfredo Sanchez Will Dietz +Will Roberts Willem Sparreboom William A. 
Rowe Jr William Ahern +wmsch on github +wncboy on github Wojciech Zwiefka Wouter Van Rooy Wu Yongzheng Wyatt O'Day Xavier Bouchoux XhstormR on github +Xiang Xiao Xiangbin Li +Xiaoyin Liu +XmiliaH on github +xwxbug on github Yaakov Selkowitz Yang Tse Yarram Sunil Yasuharu Yamada Yasuhiro Matsumoto +Yechiel Kalmenson Yehezkel Horowitz Yehoshua Hershberg +ygthien on github Yi Huang Yiming Jing Yingwei Liu Yonggang Luo +youngchopin on github Yousuke Kimoto Yu Xin Yukihiro Kawada Yun SangHo +Yuri Slobodyanyuk Yuriy Sosov Yves Arrouye Yves Lejeune Zachary Seguin Zdenek Pavlas Zekun Ni +zelinchen on github Zenju on github Zero King Zhao Yisha Zhaoyang Wu Zhibiao Wu Zhouyihai Ding +zloi-user on github Zmey Petroff Zvi Har'El -accountantM on github -adnn on github -afrind on github -ahodesuka on github -anshnd on github -arainchik on github -asavah on github -baumanj on github -bobmitchell1956 on github -bsammon on github -buzo-ffm on github -cbartl on github -clbr on github -cmfrolick on github -d912e3 on github -daboul on github -dasimx on github -destman on github -dkjjr89 on github -dnivras on github -dpull on github -dtmsecurity on github -eXeC64 on github -elelel on github -elephoenix on github -guitared on github -hsiao yi -imilli on github -infinnovation-dev on github -iz8mbw on github -jakirkham on github -jasal82 on github -jnbr on github -jonrumsey on github -joshhe on github -jungle-boogie on github -jveazey on github -ka7 on github -kreshano on github -lijian996 on github -lukaszgn on github -madblobfish on github -marc-groundctl on github -masbug on github -mccormickt12 on github -mkzero on github -moohoorama on github -nedres on github -neex on github -neheb on github -nianxuejie on github -nk -nopjmp on github -olesteban on github -omau on github -ovidiu-benea on github -patelvivekv1993 on github -pszemus on github -silveja1 on github -steelman on github -steini2000 on github -stootill on github -swalkaus at yahoo.com -tarek112 on github -tholin on github -tommink[at]post.pl -tonystz on Github -tpaukrt on github -vanillajonathan on github -wesinator on github -wmsch on github -wncboy on github -youngchopin on github -zelinchen on github zzq1015 on github İsmail Dönmez Łukasz Domeradzki Štefan Kremeň +Коваленко Анатолий Викторович Никита Дорохин +加藤郁之 diff --git a/docs/THANKS-filter b/docs/THANKS-filter index 29dc24c8a42022..e11ad6d279b55c 100644 --- a/docs/THANKS-filter +++ b/docs/THANKS-filter @@ -1,3 +1,25 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### +# # This is a list of names we have recorded that already are thanked # appropriately in THANKS. This list contains variations of their names and # their "canonical" name. 
This file is used for scripting purposes to avoid @@ -25,7 +47,7 @@ s/upstream tests 305 and 404// s/Gaël PORTAY/Gaël Portay/ s/Romulo Ceccon/Romulo A. Ceccon/ s/Nach M. S$/Nach M. S./ -s/Jay Satiro/Ray Satiro/ +s/Ja[yt] Satiro/Ray Satiro/ s/Richard J. Moore/Richard Moore/ s/Sergey Nikulov/Sergei Nikulov/ s/Petr Písař/Petr Pisar/ @@ -98,3 +120,5 @@ s/Jason Priebe/Jason S. Priebe/ s/Ale Vesely/Alessandro Vesely/ s/Yamada Yasuharu/Yasuharu Yamada/ s/Jim Gallagher/James Gallagher/ +s/Steve Brokenshire/Stephen Brokenshire/ +s/wangp on github/Peter Wang/ diff --git a/docs/TODO b/docs/TODO index b6a35a77729fb0..d45319d7b66b9a 100644 --- a/docs/TODO +++ b/docs/TODO @@ -18,11 +18,11 @@ 1. libcurl 1.1 TFO support on Windows - 1.2 More data sharing + 1.2 Consult %APPDATA% also for .netrc 1.3 struct lifreq - 1.4 signal-based resolver timeouts + 1.4 alt-svc sharing 1.5 get rid of PATH_MAX - 1.6 Modified buffer size approach + 1.6 native IDN support on macOS 1.7 Support HTTP/2 for HTTP(S) proxies 1.8 CURLOPT_RESOLVE for any port number 1.9 Cache negative name resolves @@ -35,14 +35,11 @@ 1.16 Try to URL encode given URL 1.17 Add support for IRIs 1.18 try next proxy if one doesn't work - 1.19 Timeout idle connections from the pool 1.20 SRV and URI DNS records - 1.21 Have the URL API offer IDN decoding 1.22 CURLINFO_PAUSE_STATE 1.23 Offer API to flush the connection pool 1.24 TCP Fast Open for windows 1.25 Expose tried IP addresses that failed - 1.26 CURL_REFUSE_CLEARTEXT 1.27 hardcode the "localhost" addresses 1.28 FD_CLOEXEC 1.29 Upgrade to websockets @@ -55,6 +52,8 @@ 2.4 Split connect and authentication process 2.5 Edge-triggered sockets should work 2.6 multi upkeep + 2.7 Virtual external sockets + 2.8 dynamically decide to use socketpair 3. Documentation 3.2 Provide cmake config-file @@ -63,7 +62,6 @@ 4.1 HOST 4.2 Alter passive/active on failure and retry 4.3 Earlier bad letter detection - 4.4 REST for large files 4.5 ASCII support 4.6 GSSAPI via Windows SSPI 4.7 STAT for LIST without data connection @@ -71,12 +69,10 @@ 5. HTTP 5.1 Better persistency for HTTP 1.0 - 5.2 support FF3 sqlite cookie files + 5.2 Set custom client ip when using haproxy protocol 5.3 Rearrange request header order 5.4 Allow SAN names in HTTP/2 server push 5.5 auth= in URLs - 5.6 Refuse "downgrade" redirects - 5.7 QUIC 6. TELNET 6.1 ditch stdin @@ -84,12 +80,10 @@ 6.3 feature negotiation debug data 7. SMTP - 7.1 Pipelining 7.2 Enhanced capability support 7.3 Add CURLOPT_MAIL_CLIENT option 8. POP3 - 8.1 Pipelining 8.2 Enhanced capability support 9. IMAP @@ -97,6 +91,8 @@ 10. LDAP 10.1 SASL based authentication mechanisms + 10.2 CURLOPT_SSL_CTX_FUNCTION for LDAPS + 10.3 Paged searches on LDAP server 11. SMB 11.1 File listing support @@ -105,10 +101,9 @@ 11.4 Create remote directories 12. New protocols - 12.1 RSYNC 13. SSL - 13.1 Disable specific versions + 13.1 TLS-PSK with OpenSSL 13.2 Provide mutex locking API 13.3 Support in-memory certs/ca certs/keys 13.4 Cache/share OpenSSL contexts @@ -116,21 +111,21 @@ 13.6 Provide callback for cert verification 13.7 improve configure --with-ssl 13.8 Support DANE - 13.9 Configurable loading of OpenSSL configuration file + 13.9 TLS record padding 13.10 Support Authority Information Access certificate extension (AIA) 13.11 Support intermediate & root pinning for PINNEDPUBLICKEY 13.12 Support HSTS - 13.13 Support HPKP + 13.13 Make sure we forbid TLS 1.3 post-handshake authentication 13.14 Support the clienthello extension 14. GnuTLS - 14.1 SSL engine stuff 14.2 check connection - 15. 
WinSSL/SChannel - 15.1 Add support for client certificate authentication - 15.3 Add support for the --ciphers option - 15.4 Add option to disable client certificate auto-send + 15. Schannel + 15.1 Extend support for client certificate authentication + 15.2 Extend support for the --ciphers option + 15.3 Add option to disable client certificate auto-send + 15.4 Add option to allow abrupt server closure 16. SASL 16.1 Other authentication mechanisms @@ -139,36 +134,40 @@ 17. SSH protocols 17.1 Multiplexing - 17.2 SFTP performance + 17.2 Handle growing SFTP files 17.3 Support better than MD5 hostkey hash 17.4 Support CURLOPT_PREQUOTE + 17.5 SSH over HTTPS proxy with more backends 18. Command line tool 18.1 sync 18.2 glob posts 18.3 prevent file overwriting - 18.4 simultaneous parallel transfers + 18.4 --proxycommand 18.5 UTF-8 filenames in Content-Disposition - 18.6 warning when setting an option - 18.7 warning if curl version is not in sync with libcurl version - 18.8 offer color-coded HTTP header output + 18.6 Option to make -Z merge lined based outputs on stdout + 18.7 at least N milliseconds between requests + 18.8 Consider convenience options for JSON and XML? 18.9 Choose the name of file in braces for complex URLs 18.10 improve how curl works in a windows console window 18.11 Windows: set attribute 'archive' for completed downloads 18.12 keep running, read instructions from pipe/socket - 18.13 support metalink in http headers - 18.14 --fail without --location should treat 3xx as a failure + 18.13 Ratelimit or wait between serial requests + 18.14 --dry-run 18.15 --retry should resume 18.16 send only part of --data 18.17 consider file name from the redirected URL with -O ? 18.18 retry on network is unreachable 18.19 expand ~/ in config files 18.20 host name sections in config files + 18.21 retry on the redirected-to URL + 18.23 Set the modification date on an uploaded file + 18.24 Use multiple parallel transfers for a single download 19. Build 19.1 roffit 19.2 Enable PIE and RELRO by default - 19.3 cmake test suite improvements + 19.3 Don't use GNU libtool on OpenBSD 20. Test suite 20.1 SSL tunnel @@ -178,6 +177,8 @@ 20.5 Add support for concurrent connections 20.6 Use the RFC6265 test suite 20.7 Support LD_PRELOAD on macOS + 20.8 Run web-platform-tests url tests + 20.9 Use "random" ports for the test servers 21. Next SONAME bump 21.1 http-style HEAD output for FTP @@ -204,10 +205,11 @@ See https://github.com/curl/curl/pull/3378 -1.2 More data sharing +1.2 Consult %APPDATA% also for .netrc - curl_share_* functions already exist and work, and they can be extended to - share more. For example, enable sharing of the ares channel. + %APPDATA%\.netrc is not considered when running on Windows. Shouldn't it? + + See https://github.com/curl/curl/issues/4016 1.3 struct lifreq @@ -215,52 +217,38 @@ SIOCGIFADDR on newer Solaris versions as they claim the latter is obsolete. To support IPv6 interface addresses for network interfaces properly. -1.4 signal-based resolver timeouts +1.4 alt-svc sharing - libcurl built without an asynchronous resolver library uses alarm() to time - out DNS lookups. When a timeout occurs, this causes libcurl to jump from the - signal handler back into the library with a sigsetjmp, which effectively - causes libcurl to continue running within the signal handler. This is - non-portable and could cause problems on some platforms. 
A discussion on the - problem is available at https://curl.haxx.se/mail/lib-2008-09/0197.html + The share interface could benefit from allowing the alt-svc cache to be + possible to share between easy handles. - Also, alarm() provides timeout resolution only to the nearest second. alarm - ought to be replaced by setitimer on systems that support it. + See https://github.com/curl/curl/issues/4476 1.5 get rid of PATH_MAX Having code use and rely on PATH_MAX is not nice: https://insanecoding.blogspot.com/2007/11/pathmax-simply-isnt.html - Currently the SSH based code uses it a bit, but to remove PATH_MAX from there - we need libssh2 to properly tell us when we pass in a too small buffer and - its current API (as of libssh2 1.2.7) doesn't. - -1.6 Modified buffer size approach - - Current libcurl allocates a fixed 16K size buffer for download and an - additional 16K for upload. They are always unconditionally part of the easy - handle. If CRLF translations are requested, an additional 32K "scratch - buffer" is allocated. A total of 64K transfer buffers in the worst case. + Currently the libssh2 SSH based code uses it, but to remove PATH_MAX from + there we need libssh2 to properly tell us when we pass in a too small buffer + and its current API (as of libssh2 1.2.7) doesn't. - First, while the handles are not actually in use these buffers could be freed - so that lingering handles just kept in queues or whatever waste less memory. +1.6 native IDN support on macOS - Secondly, SFTP is a protocol that needs to handle many ~30K blocks at once - since each need to be individually acked and therefore libssh2 must be - allowed to send (or receive) many separate ones in parallel to achieve high - transfer speeds. A current libcurl build with a 16K buffer makes that - impossible, but one with a 512K buffer will reach MUCH faster transfers. But - allocating 512K unconditionally for all buffers just in case they would like - to do fast SFTP transfers at some point is not a good solution either. + On recent macOS versions, the getaddrinfo() function itself has built-in IDN + support. By setting the AI_CANONNAME flag, the function will return the + encoded name in the ai_canonname struct field in the returned information. + This could be used by curl on macOS when built without a separate IDN library + and an IDN host name is used in a URL. - Dynamically allocate buffer size depending on protocol in use in combination - with freeing it after each individual transfer? Other suggestions? + See initial work in https://github.com/curl/curl/pull/5371 1.7 Support HTTP/2 for HTTP(S) proxies Support for doing HTTP/2 to HTTP and HTTPS proxies is still missing. + See https://github.com/curl/curl/issues/3570 + 1.8 CURLOPT_RESOLVE for any port number This option allows applications to set a replacement IP address for a given @@ -372,27 +360,11 @@ https://github.com/curl/curl/issues/896 -1.19 Timeout idle connections from the pool - - libcurl currently keeps connections in its connection pool for an indefinite - period of time, until it either gets reused, gets noticed that it has been - closed by the server or gets pruned to make room for a new connection. - - To reduce overhead (especially for when we add monitoring of the connections - in the pool), we should introduce a timeout so that connections that have - been idle for N seconds get closed. - 1.20 SRV and URI DNS records Offer support for resolving SRV and URI DNS records for libcurl to know which server to connect to for various protocols (including HTTP!). 
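  As a rough, purely illustrative sketch of what item 1.20 above asks for
  (the names and numbers below are made up): an SRV record maps a service and
  transport to a priority, weight, port and target host, so a lookup and its
  answer could look like

    $ dig +short _sip._tcp.example.com SRV
    10 60 5060 sipserver.example.com.

  and a client honoring SRV would then connect to sipserver.example.com on
  port 5060 rather than to the URL's host name on the protocol's default
  port.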
-1.21 Have the URL API offer IDN decoding - - Similar to how URL decoding/encoding is done, we could have URL functions to - convert IDN host names to punycode (probably not the reverse). - https://github.com/curl/curl/issues/3232 - 1.22 CURLINFO_PAUSE_STATE Return information about the transfer's current pause state, in both @@ -417,21 +389,6 @@ https://github.com/curl/curl/issues/2126 -1.26 CURL_REFUSE_CLEARTEXT - - An environment variable that when set will make libcurl refuse to use any - cleartext network protocol. That's all non-encrypted ones (FTP, HTTP, Gopher, - etc). By adding the check to libcurl and not just curl, this environment - variable can then help users to block all libcurl-using programs from - accessing the network using unsafe protocols. - - The variable could be given some sort of syntax or different levels and be - used to also allow for example users to refuse libcurl to do transfers with - HTTPS certificate checks disabled. - - It could also automatically refuse usernames in URLs when set - (see CURLOPT_DISALLOW_USERNAME_IN_URL) - 1.27 hardcode the "localhost" addresses There's this new spec getting adopted that says "localhost" should always and @@ -475,12 +432,21 @@ Make sure we don't ever loop because of non-blocking sockets returning EWOULDBLOCK or similar. Blocking cases include: - - Name resolves on non-windows unless c-ares or the threaded resolver is used - - SOCKS proxy handshakes + - Name resolves on non-windows unless c-ares or the threaded resolver is used. + + - The threaded resolver may block on cleanup: + https://github.com/curl/curl/issues/4852 + - file:// transfers + - TELNET transfers + + - GSSAPI authentication for FTP transfers + - The "DONE" operation (post transfer protocol-specific actions) for the - protocols SFTP, SMTP, FTP. Fixing Curl_done() for this is a worthy task. + protocols SFTP, SMTP, FTP. Fixing multi_done() for this is a worthy task. + + - curl_multi_remove_handle for any of the above. See section 2.3. 2.2 Better support for same name resolves @@ -520,6 +486,23 @@ See https://github.com/curl/curl/issues/3199 +2.7 Virtual external sockets + + libcurl performs operations on the given file descriptor that presumes it is + a socket and an application cannot replace them at the moment. Allowing an + application to fully replace those would allow a larger degree of freedom and + flexibility. + + See https://github.com/curl/curl/issues/5835 + +2.8 dynamically decide to use socketpair + + For users who don't use curl_multi_wait() or don't care for + curl_multi_wakeup(), we could introduce a way to make libcurl NOT + create a socketpair in the multi handle. + + See https://github.com/curl/curl/issues/4829 + 3. Documentation 3.2 Provide cmake config-file @@ -549,12 +532,6 @@ Make the detection of (bad) %0d and %0a codes in FTP URL parts earlier in the process to avoid doing a resolve and connect in vain. -4.4 REST for large files - - REST fix for servers not behaving well on >2GB requests. This should fail if - the server doesn't set the pointer to the requested index. The tricky - (impossible?) part is to figure out if the server did the right thing or not. - 4.5 ASCII support FTP ASCII transfers do not follow RFC959. 
They don't convert the data @@ -587,11 +564,12 @@ "Better" support for persistent connections over HTTP 1.0 https://curl.haxx.se/bug/feature.cgi?id=1089001 -5.2 support FF3 sqlite cookie files +5.2 Set custom client ip when using haproxy protocol + + This would allow testing servers with different client ip addresses (without + using x-forward-for header). - Firefox 3 is changing from its former format to a a sqlite database instead. - We should consider how (lib)curl can/should support this. - https://curl.haxx.se/bug/feature.cgi?id=1871388 + https://github.com/curl/curl/issues/5125 5.3 Rearrange request header order @@ -621,36 +599,19 @@ For example: - http://test:pass;auth=NTLM@example.com would be equivalent to specifying --user - test:pass;auth=NTLM or --user test:pass --ntlm from the command line. + http://test:pass;auth=NTLM@example.com would be equivalent to specifying + --user test:pass;auth=NTLM or --user test:pass --ntlm from the command line. Additionally this should be implemented for proxy base URLs as well. -5.6 Refuse "downgrade" redirects - - See https://github.com/curl/curl/issues/226 - - Consider a way to tell curl to refuse to "downgrade" protocol with a redirect - and/or possibly a bit that refuses redirect to change protocol completely. - -5.7 QUIC - - The standardization process of QUIC has been taken to the IETF and can be - followed on the [IETF QUIC Mailing - list](https://www.ietf.org/mailman/listinfo/quic). I'd like us to get on the - bandwagon. Ideally, this would be done with a separate library/project to - handle the binary/framing layer in a similar fashion to how HTTP/2 is - implemented. This, to allow other projects to benefit from the work and to - thus broaden the interest and chance of others to participate. - 6. TELNET 6.1 ditch stdin -Reading input (to send to the remote server) on stdin is a crappy solution for -library purposes. We need to invent a good way for the application to be able -to provide the data to send. + Reading input (to send to the remote server) on stdin is a crappy solution + for library purposes. We need to invent a good way for the application to be + able to provide the data to send. 6.2 ditch telnet-specific select @@ -660,15 +621,11 @@ to provide the data to send. 6.3 feature negotiation debug data - Add telnet feature negotiation data to the debug callback as header data. + Add telnet feature negotiation data to the debug callback as header data. 7. SMTP -7.1 Pipelining - - Add support for pipelining emails. - 7.2 Enhanced capability support Add the ability, for an application that uses libcurl, to obtain the list of @@ -687,10 +644,6 @@ to provide the data to send. 8. POP3 -8.1 Pipelining - - Add support for pipelining commands. - 8.2 Enhanced capability support Add the ability, for an application that uses libcurl, to obtain the list of @@ -713,6 +666,17 @@ to provide the data to send. be possible to use ldap_bind_s() instead specifying the security context information ourselves. +10.2 CURLOPT_SSL_CTX_FUNCTION for LDAPS + + CURLOPT_SSL_CTX_FUNCTION works perfectly for HTTPS and email protocols, but + it has no effect for LDAPS connections. + + https://github.com/curl/curl/issues/4108 + +10.3 Paged searches on LDAP server + + https://github.com/curl/curl/issues/4452 + 11. SMB 11.1 File listing support @@ -735,17 +699,16 @@ that doesn't exist on the server, just like --ftp-create-dirs. 12. New protocols -12.1 RSYNC - - There's no RFC for the protocol or an URI/URL format. 
An implementation - should most probably use an existing rsync library, such as librsync. - 13. SSL -13.1 Disable specific versions +13.1 TLS-PSK with OpenSSL + + Transport Layer Security pre-shared key ciphersuites (TLS-PSK) is a set of + cryptographic protocols that provide secure communication based on pre-shared + keys (PSKs). These pre-shared keys are symmetric keys shared in advance among + the communicating parties. - Provide an option that allows for disabling specific SSL versions, such as - SSLv2 https://curl.haxx.se/bug/feature.cgi?id=1767276 + https://github.com/curl/curl/issues/5081 13.2 Provide mutex locking API @@ -811,16 +774,13 @@ that doesn't exist on the server, just like --ftp-create-dirs. Björn Stenberg wrote a separate initial take on DANE that was never completed. -13.9 Configurable loading of OpenSSL configuration file - - libcurl calls the OpenSSL function CONF_modules_load_file() in openssl.c, - Curl_ossl_init(). "We regard any changes in the OpenSSL configuration as a - security risk or at least as unnecessary." +13.9 TLS record padding - Please add a configuration switch or something similar to disable the - CONF_modules_load_file() call. + TLS (1.3) offers optional record padding and OpenSSL provides an API for it. + I could make sense for libcurl to offer this ability to applications to make + traffic patterns harder to figure out by network traffic observers. - See https://github.com/curl/curl/issues/2724 + See https://github.com/curl/curl/issues/5398 13.10 Support Authority Information Access certificate extension (AIA) @@ -837,37 +797,29 @@ that doesn't exist on the server, just like --ftp-create-dirs. CURLOPT_PINNEDPUBLICKEY does not consider the hashes of intermediate & root certificates when comparing the pinned keys. Therefore it is not compatible - with "HTTP Public Key Pinning" as there also intermediate and root certificates - can be pinned. This is very useful as it prevents webadmins from "locking - themself out of their servers". + with "HTTP Public Key Pinning" as there also intermediate and root + certificates can be pinned. This is very useful as it prevents webadmins from + "locking themself out of their servers". - Adding this feature would make curls pinning 100% compatible to HPKP and allow - more flexible pinning. + Adding this feature would make curls pinning 100% compatible to HPKP and + allow more flexible pinning. 13.12 Support HSTS "HTTP Strict Transport Security" is TOFU (trust on first use), time-based features indicated by a HTTP header send by the webserver. It is widely used - in browsers and it's purpose is to prevent insecure HTTP connections after - a previous HTTPS connection. It protects against SSLStripping attacks. + in browsers and it's purpose is to prevent insecure HTTP connections after a + previous HTTPS connection. It protects against SSLStripping attacks. Doc: https://developer.mozilla.org/en-US/docs/Web/Security/HTTP_strict_transport_security RFC 6797: https://tools.ietf.org/html/rfc6797 -13.13 Support HPKP +13.13 Make sure we forbid TLS 1.3 post-handshake authentication - "HTTP Public Key Pinning" is TOFU (trust on first use), time-based - features indicated by a HTTP header send by the webserver. It's purpose is - to prevent Man-in-the-middle attacks by trusted CAs by allowing webadmins - to specify which CAs/certificates/public keys to trust when connection to - their websites. + RFC 8740 explains how using HTTP/2 must forbid the use of TLS 1.3 + post-handshake authentication. 
We should make sure to live up to that. - It can be build based on PINNEDPUBLICKEY. - - Wikipedia: https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning - OWASP: https://www.owasp.org/index.php/Certificate_and_Public_Key_Pinning - Doc: https://developer.mozilla.org/de/docs/Web/Security/Public_Key_Pinning - RFC: https://tools.ietf.org/html/draft-ietf-websec-key-pinning-21 + See https://github.com/curl/curl/issues/5396 13.14 Support the clienthello extension @@ -881,40 +833,28 @@ that doesn't exist on the server, just like --ftp-create-dirs. 14. GnuTLS -14.1 SSL engine stuff - - Is this even possible? - 14.2 check connection Add a way to check if the connection seems to be alive, to correspond to the SSL_peak() way we use with OpenSSL. -15. WinSSL/SChannel +15. Schannel -15.1 Add support for client certificate authentication +15.1 Extend support for client certificate authentication - WinSSL/SChannel currently makes use of the OS-level system and user - certificate and private key stores. This does not allow the application - or the user to supply a custom client certificate using curl or libcurl. - - Therefore support for the existing -E/--cert and --key options should be - implemented by supplying a custom certificate to the SChannel APIs, see: + The existing support for the -E/--cert and --key options could be + extended by supplying a custom certificate and key in PEM format, see: - Getting a Certificate for Schannel https://msdn.microsoft.com/en-us/library/windows/desktop/aa375447.aspx -15.3 Add support for the --ciphers option - - The cipher suites used by WinSSL/SChannel are configured on an OS-level - instead of an application-level. This does not allow the application or - the user to customize the configured cipher suites using curl or libcurl. +15.2 Extend support for the --ciphers option - Therefore support for the existing --ciphers option should be implemented - by mapping the OpenSSL/GnuTLS cipher suites to the SChannel APIs, see + The existing support for the --ciphers option could be extended + by mapping the OpenSSL/GnuTLS cipher suites to the Schannel APIs, see - Specifying Schannel Ciphers and Cipher Strengths https://msdn.microsoft.com/en-us/library/windows/desktop/aa380161.aspx -15.4 Add option to disable client certificate auto-send +15.3 Add option to disable client certificate auto-send Microsoft says "By default, Schannel will, with no notification to the client, attempt to locate a client certificate and send it to the server." That could @@ -927,6 +867,15 @@ that doesn't exist on the server, just like --ftp-create-dirs. https://github.com/curl/curl/issues/2262 +15.4 Add option to allow abrupt server closure + + libcurl w/schannel will error without a known termination point from the + server (such as length of transfer, or SSL "close notify" alert) to prevent + against a truncation attack. Really old servers may neglect to send any + termination point. An option could be added to ignore such abrupt closures. + + https://github.com/curl/curl/issues/4427 + 16. SASL 16.1 Other authentication mechanisms @@ -959,10 +908,15 @@ that doesn't exist on the server, just like --ftp-create-dirs. To fix this, libcurl would have to detect an existing connection and "attach" the new transfer to the existing one. -17.2 SFTP performance +17.2 Handle growing SFTP files - libcurl's SFTP transfer performance is sub par and can be improved, mostly by - the approach mentioned in "1.6 Modified buffer size approach". 
+ The SFTP code in libcurl checks the file size *before* a transfer starts and + then proceeds to transfer exactly that amount of data. If the remote file + grows while the transfer is in progress libcurl won't notice and will not + adapt. The OpenSSH SFTP command line tool does and libcurl could also just + attempt to download more to see if there is more to get... + + https://github.com/curl/curl/issues/4344 17.3 Support better than MD5 hostkey hash @@ -978,6 +932,13 @@ that doesn't exist on the server, just like --ftp-create-dirs. The two other QUOTE options are supported for SFTP, but this was left out for unknown reasons! +17.5 SSH over HTTPS proxy with more backends + + The SSH based protocols SFTP and SCP didn't work over HTTPS proxy at + all until PR https://github.com/curl/curl/pull/6021 brought the + functionality with the libssh2 backend. Presumably, this support + can/could be added for the other backends as well. + 18. Command line tool 18.1 sync @@ -1002,15 +963,15 @@ that doesn't exist on the server, just like --ftp-create-dirs. existing). So that index.html becomes first index.html.1 and then index.html.2 etc. -18.4 simultaneous parallel transfers +18.4 --proxycommand - The client could be told to use maximum N simultaneous parallel transfers and - then just make sure that happens. It should of course not make more than one - connection to the same remote host. This would require the client to use the - multi interface. https://curl.haxx.se/bug/feature.cgi?id=1558595 + Allow the user to make curl run a command and use its stdio to make requests + and not do any network connection by itself. Example: - Using the multi interface would also allow properly using parallel transfers - with HTTP/2 and supporting HTTP/2 server push from the command line. + curl --proxycommand 'ssh pi@raspberrypi.local -W 10.1.1.75 80' \ + http://some/otherwise/unavailable/service.php + + See https://github.com/curl/curl/issues/4941 18.5 UTF-8 filenames in Content-Disposition @@ -1019,24 +980,38 @@ that doesn't exist on the server, just like --ftp-create-dirs. https://github.com/curl/curl/issues/1888 -18.6 warning when setting an option +18.6 Option to make -Z merge lined based outputs on stdout + + When a user requests multiple lined based files using -Z and sends them to + stdout, curl will not "merge" and send complete lines fine but may very well + send partial lines from several sources. + + https://github.com/curl/curl/issues/5175 + +18.7 at least N milliseconds between requests + + Allow curl command lines issue a lot of request against services that limit + users to no more than N requests/second or similar. Could be implemented with + an option asking that at least a certain time has elapsed since the previous + request before the next one will be performed. Example: + + $ curl "https://example.com/api?input=[1-1000]" -d yadayada --after 500 + + See https://github.com/curl/curl/issues/3920 - Display a warning when libcurl returns an error when setting an option. - This can be useful to tell when support for a particular feature hasn't been - compiled into the library. +18.8 Consider convenience options for JSON and XML? -18.7 warning if curl version is not in sync with libcurl version + Could we add `--xml` or `--json` to add headers needed to call rest API: - This is usually a sign of a funny, weird or unexpected install situations - that aren't always quickly nor easily detected by users. 
curl and libcurl are - always released in sync and should use the same version numbers unless very - special situations. + `--xml` adds -H 'Content-Type: application/xml' -H "Accept: application/xml" and + `--json` adds -H 'Content-Type: application/json' -H "Accept: application/json" -18.8 offer color-coded HTTP header output + Setting Content-Type when doing a GET or any other method without a body + would be a bit strange I think - so maybe only add CT for requests with body? + Maybe plain `--xml` and ` --json` are a bit too brief and generic. Maybe + `--http-json` etc? - By offering different color output on the header name and the header - contents, they could be made more readable and thus help users working on - HTTP services. + See https://github.com/curl/curl/issues/5203 18.9 Choose the name of file in braces for complex URLs @@ -1075,29 +1050,22 @@ that doesn't exist on the server, just like --ftp-create-dirs. invoke can talk to the still running instance and ask for transfers to get done, and thus maintain its connection pool, DNS cache and more. -18.13 support metalink in http headers +18.13 Ratelimit or wait between serial requests - Curl has support for downloading a metalink xml file, processing it, and then - downloading the target of the metalink. This is done via the --metalink option. - It would be nice if metalink also supported downloading via metalink - information that is stored in HTTP headers (RFC 6249). Theoretically this could - also be supported with the --metalink option. + Consider a command line option that can make curl do multiple serial requests + slow, potentially with a (random) wait between transfers. There's also a + propsed set of standard HTTP headers to let servers let the client adapt to + its rate limits: + https://www.ietf.org/id/draft-polli-ratelimit-headers-02.html - See https://tools.ietf.org/html/rfc6249 + See https://github.com/curl/curl/issues/5406 - See also https://lists.gnu.org/archive/html/bug-wget/2015-06/msg00034.html for - an implematation of this in wget. +18.14 --dry-run -18.14 --fail without --location should treat 3xx as a failure + A command line option that makes curl show exactly what it would do and send + if it would run for real. - To allow a command line like this to detect a redirect and consider it a - failure: - - curl -v --fail -O https://example.com/curl-7.48.0.tar.gz - - ... --fail must treat 3xx responses as failures too. The least problematic - way to implement this is probably to add that new logic in the command line - tool only and not in the underlying CURLOPT_FAILONERROR logic. + See https://github.com/curl/curl/issues/5426 18.15 --retry should resume @@ -1161,6 +1129,45 @@ that doesn't exist on the server, just like --ftp-create-dirs. default .curlrc could a specific user-agent only when doing requests against a certain site. +18.21 retry on the redirected-to URL + + When curl is told to --retry a failed transfer and follows redirects, it + might get a HTTP 429 response from the redirected-to URL and not the original + one, which then could make curl decide to rather retry the transfer on that + URL only instead of the original operation to the original URL. + + Perhaps extra emphasized if the original transfer is a large POST that + redirects to a separate GET, and that GET is what gets the 529 + + See https://github.com/curl/curl/issues/5462 + +18.23 Set the modification date on an uploaded file + + For SFTP and posssibly FTP, curl could offer an option to set the + modification time for the uploaded file. 
+ + See https://github.com/curl/curl/issues/5768 + +18.24 Use multiple parallel transfers for a single download + + To enhance transfer speed, downloading a single URL can be split up into + multiple separate range downloads that get combined into a single final + result. + + An ideal implementation would not use a specified number of parallel + transfers, but curl could: + - First start getting the full file as transfer A + - If after N seconds have passed and the transfer is expected to continue for + M seconds or more, add a new transfer (B) that asks for the second half of + A's content (and stop A at the middle). + - If splitting up the work improves the transfer rate, it could then be done + again. Then again, etc up to a limit. + + This way, if transfer B fails (because Range: isn't supported) it will let + transfer A remain the single one. N and M could be set to some sensible + defaults. + + See https://github.com/curl/curl/issues/5774 19. Build @@ -1181,13 +1188,15 @@ that doesn't exist on the server, just like --ftp-create-dirs. to no impact, neither on the performance nor on the general functionality of curl. -19.3 cmake test suite improvements - - The cmake build doesn't support 'make show' so it doesn't know which tests - are in the makefile or not (making appveyor builds do many false warnings - about it) nor does it support running the test suite if building out-of-tree. +19.3 Don't use GNU libtool on OpenBSD + When compiling curl on OpenBSD with "--enable-debug" it will give linking + errors when you use GNU libtool. This can be fixed by using the libtool + provided by OpenBSD itself. However for this the user always needs to invoke + make with "LIBTOOL=/usr/bin/libtool". It would be nice if the script could + have some magic to detect if this system is an OpenBSD host and then use the + OpenBSD libtool instead. - See https://github.com/curl/curl/issues/3109 + See https://github.com/curl/curl/issues/5862 20. Test suite @@ -1214,17 +1223,17 @@ that doesn't exist on the server, just like --ftp-create-dirs. 20.5 Add support for concurrent connections - Tests 836, 882 and 938 were designed to verify that separate connections aren't - used when using different login credentials in protocols that shouldn't re-use - a connection under such circumstances. + Tests 836, 882 and 938 were designed to verify that separate connections + aren't used when using different login credentials in protocols that + shouldn't re-use a connection under such circumstances. Unfortunately, ftpserver.pl doesn't appear to support multiple concurrent - connections. The read while() loop seems to loop until it receives a disconnect - from the client, where it then enters the waiting for connections loop. When - the client opens a second connection to the server, the first connection hasn't - been dropped (unless it has been forced - which we shouldn't do in these tests) - and thus the wait for connections loop is never entered to receive the second - connection. + connections. The read while() loop seems to loop until it receives a + disconnect from the client, where it then enters the waiting for connections + loop. When the client opens a second connection to the server, the first + connection hasn't been dropped (unless it has been forced - which we + shouldn't do in these tests) and thus the wait for connections loop is never + entered to receive the second connection. 20.6 Use the RFC6265 test suite @@ -1241,6 +1250,26 @@ that doesn't exist on the server, just like --ftp-create-dirs. properly. 
Look into making the preload support in runtests.pl portable such that it uses DYLD_INSERT_LIBRARIES on macOS. +20.8 Run web-platform-tests url tests + + Run web-platform-tests url tests and compare results with browsers on wpt.fyi + + It would help us find issues to fix and help us document where our parser + differs from the WHATWG URL spec parsers. + + See https://github.com/curl/curl/issues/4477 + +20.9 Use "random" ports for the test servers + + Instead of insisting and using fixed port numbers for the tests (even though + they can be changed with a switch), consider letting each server pick a + random available one at start-up, store that info in a file and let the test + suite use that. + + We could then remove the "check that it is our server that's running"-check + and we would immediately detect when we write tests wrongly to use hard-coded + port numbers. + 21. Next SONAME bump 21.1 http-style HEAD output for FTP diff --git a/docs/TheArtOfHttpScripting b/docs/TheArtOfHttpScripting deleted file mode 100644 index c5b67ca1b5f3ed..00000000000000 --- a/docs/TheArtOfHttpScripting +++ /dev/null @@ -1,758 +0,0 @@ - _ _ ____ _ - ___| | | | _ \| | - / __| | | | |_) | | - | (__| |_| | _ <| |___ - \___|\___/|_| \_\_____| - - -The Art Of Scripting HTTP Requests Using Curl - - 1. HTTP Scripting - 1.1 Background - 1.2 The HTTP Protocol - 1.3 See the Protocol - 1.4 See the Timing - 1.5 See the Response - 2. URL - 2.1 Spec - 2.2 Host - 2.3 Port number - 2.4 User name and password - 2.5 Path part - 3. Fetch a page - 3.1 GET - 3.2 HEAD - 3.3 Multiple URLs in a single command line - 3.4 Multiple HTTP methods in a single command line - 4. HTML forms - 4.1 Forms explained - 4.2 GET - 4.3 POST - 4.4 File Upload POST - 4.5 Hidden Fields - 4.6 Figure Out What A POST Looks Like - 5. HTTP upload - 5.1 PUT - 6. HTTP Authentication - 6.1 Basic Authentication - 6.2 Other Authentication - 6.3 Proxy Authentication - 6.4 Hiding credentials - 7. More HTTP Headers - 7.1 Referer - 7.2 User Agent - 8. Redirects - 8.1 Location header - 8.2 Other redirects - 9. Cookies - 9.1 Cookie Basics - 9.2 Cookie options - 10. HTTPS - 10.1 HTTPS is HTTP secure - 10.2 Certificates - 11. Custom Request Elements - 11.1 Modify method and headers - 11.2 More on changed methods - 12. Web Login - 12.1 Some login tricks - 13. Debug - 13.1 Some debug tricks - 14. References - 14.1 Standards - 14.2 Sites - -============================================================================== - -1. HTTP Scripting - - 1.1 Background - - This document assumes that you're familiar with HTML and general networking. - - The increasing amount of applications moving to the web has made "HTTP - Scripting" more frequently requested and wanted. To be able to automatically - extract information from the web, to fake users, to post or upload data to - web servers are all important tasks today. - - Curl is a command line tool for doing all sorts of URL manipulations and - transfers, but this particular document will focus on how to use it when - doing HTTP requests for fun and profit. I'll assume that you know how to - invoke 'curl --help' or 'curl --manual' to get basic information about it. - - Curl is not written to do everything for you. It makes the requests, it gets - the data, it sends data and it retrieves the information. You probably need - to glue everything together using some kind of script language or repeated - manual invokes. - - 1.2 The HTTP Protocol - - HTTP is the protocol used to fetch data from web servers. 
It is a very simple - protocol that is built upon TCP/IP. The protocol also allows information to - get sent to the server from the client using a few different methods, as will - be shown here. - - HTTP is plain ASCII text lines being sent by the client to a server to - request a particular action, and then the server replies a few text lines - before the actual requested content is sent to the client. - - The client, curl, sends a HTTP request. The request contains a method (like - GET, POST, HEAD etc), a number of request headers and sometimes a request - body. The HTTP server responds with a status line (indicating if things went - well), response headers and most often also a response body. The "body" part - is the plain data you requested, like the actual HTML or the image etc. - - 1.3 See the Protocol - - Using curl's option --verbose (-v as a short option) will display what kind - of commands curl sends to the server, as well as a few other informational - texts. - - --verbose is the single most useful option when it comes to debug or even - understand the curl<->server interaction. - - Sometimes even --verbose is not enough. Then --trace and --trace-ascii offer - even more details as they show EVERYTHING curl sends and receives. Use it - like this: - - curl --trace-ascii debugdump.txt http://www.example.com/ - - 1.4 See the Timing - - Many times you may wonder what exactly is taking all the time, or you just - want to know the amount of milliseconds between two points in a - transfer. For those, and other similar situations, the --trace-time option - is what you need. It'll prepend the time to each trace output line: - - curl --trace-ascii d.txt --trace-time http://example.com/ - - 1.5 See the Response - - By default curl sends the response to stdout. You need to redirect it - somewhere to avoid that, most often that is done with -o or -O. - -2. URL - - 2.1 Spec - - The Uniform Resource Locator format is how you specify the address of a - particular resource on the Internet. You know these, you've seen URLs like - https://curl.haxx.se or https://yourbank.com a million times. RFC 3986 is the - canonical spec. And yeah, the formal name is not URL, it is URI. - - 2.2 Host - - The host name is usually resolved using DNS or your /etc/hosts file to an IP - address and that's what curl will communicate with. Alternatively you specify - the IP address directly in the URL instead of a name. - - For development and other trying out situations, you can point to a different - IP address for a host name than what would otherwise be used, by using curl's - --resolve option: - - curl --resolve www.example.org:80:127.0.0.1 http://www.example.org/ - - 2.3 Port number - - Each protocol curl supports operates on a default port number, be it over TCP - or in some cases UDP. Normally you don't have to take that into - consideration, but at times you run test servers on other ports or - similar. Then you can specify the port number in the URL with a colon and a - number immediately following the host name. Like when doing HTTP to port - 1234: - - curl http://www.example.org:1234/ - - The port number you specify in the URL is the number that the server uses to - offer its services. Sometimes you may use a local proxy, and then you may - need to specify that proxy's port number separately for what curl needs to - connect to locally. 
Like when using a HTTP proxy on port 4321: - - curl --proxy http://proxy.example.org:4321 http://remote.example.org/ - - 2.4 User name and password - - Some services are setup to require HTTP authentication and then you need to - provide name and password which is then transferred to the remote site in - various ways depending on the exact authentication protocol used. - - You can opt to either insert the user and password in the URL or you can - provide them separately: - - curl http://user:password@example.org/ - - or - - curl -u user:password http://example.org/ - - You need to pay attention that this kind of HTTP authentication is not what - is usually done and requested by user-oriented web sites these days. They - tend to use forms and cookies instead. - - 2.5 Path part - - The path part is just sent off to the server to request that it sends back - the associated response. The path is what is to the right side of the slash - that follows the host name and possibly port number. - -3. Fetch a page - - 3.1 GET - - The simplest and most common request/operation made using HTTP is to GET a - URL. The URL could itself refer to a web page, an image or a file. The client - issues a GET request to the server and receives the document it asked for. - If you issue the command line - - curl https://curl.haxx.se - - you get a web page returned in your terminal window. The entire HTML document - that that URL holds. - - All HTTP replies contain a set of response headers that are normally hidden, - use curl's --include (-i) option to display them as well as the rest of the - document. - - 3.2 HEAD - - You can ask the remote server for ONLY the headers by using the --head (-I) - option which will make curl issue a HEAD request. In some special cases - servers deny the HEAD method while others still work, which is a particular - kind of annoyance. - - The HEAD method is defined and made so that the server returns the headers - exactly the way it would do for a GET, but without a body. It means that you - may see a Content-Length: in the response headers, but there must not be an - actual body in the HEAD response. - - 3.3 Multiple URLs in a single command line - - A single curl command line may involve one or many URLs. The most common case - is probably to just use one, but you can specify any amount of URLs. Yes - any. No limits. You'll then get requests repeated over and over for all the - given URLs. - - Example, send two GETs: - - curl http://url1.example.com http://url2.example.com - - If you use --data to POST to the URL, using multiple URLs means that you send - that same POST to all the given URLs. - - Example, send two POSTs: - - curl --data name=curl http://url1.example.com http://url2.example.com - - - 3.4 Multiple HTTP methods in a single command line - - Sometimes you need to operate on several URLs in a single command line and do - different HTTP methods on each. For this, you'll enjoy the --next option. It - is basically a separator that separates a bunch of options from the next. All - the URLs before --next will get the same method and will get all the POST - data merged into one. - - When curl reaches the --next on the command line, it'll sort of reset the - method and the POST data and allow a new set. - - Perhaps this is best shown with a few examples. 
To send first a HEAD and then - a GET: - - curl -I http://example.com --next http://example.com - - To first send a POST and then a GET: - - curl -d score=10 http://example.com/post.cgi --next http://example.com/results.html - - -4. HTML forms - - 4.1 Forms explained - - Forms are the general way a web site can present a HTML page with fields for - the user to enter data in, and then press some kind of 'OK' or 'Submit' - button to get that data sent to the server. The server then typically uses - the posted data to decide how to act. Like using the entered words to search - in a database, or to add the info in a bug tracking system, display the entered - address on a map or using the info as a login-prompt verifying that the user - is allowed to see what it is about to see. - - Of course there has to be some kind of program on the server end to receive - the data you send. You cannot just invent something out of the air. - - 4.2 GET - - A GET-form uses the method GET, as specified in HTML like: - -
-     <form method="GET" action="junk.cgi">
-       <input type=text name="birthyear">
-       <input type=submit name=press value="OK">
-     </form>
- - In your favorite browser, this form will appear with a text box to fill in - and a press-button labeled "OK". If you fill in '1905' and press the OK - button, your browser will then create a new URL to get for you. The URL will - get "junk.cgi?birthyear=1905&press=OK" appended to the path part of the - previous URL. - - If the original form was seen on the page "www.hotmail.com/when/birth.html", - the second page you'll get will become - "www.hotmail.com/when/junk.cgi?birthyear=1905&press=OK". - - Most search engines work this way. - - To make curl do the GET form post for you, just enter the expected created - URL: - - curl "http://www.hotmail.com/when/junk.cgi?birthyear=1905&press=OK" - - 4.3 POST - - The GET method makes all input field names get displayed in the URL field of - your browser. That's generally a good thing when you want to be able to - bookmark that page with your given data, but it is an obvious disadvantage - if you entered secret information in one of the fields or if there are a - large amount of fields creating a very long and unreadable URL. - - The HTTP protocol then offers the POST method. This way the client sends the - data separated from the URL and thus you won't see any of it in the URL - address field. - - The form would look very similar to the previous one: - -
-     <form method="POST" action="junk.cgi">
-       <input type=text name="birthyear">
-       <input type=submit name=press value=" OK ">
-     </form>
- - And to use curl to post this form with the same data filled in as before, we - could do it like: - - curl --data "birthyear=1905&press=%20OK%20" \ - http://www.example.com/when.cgi - - This kind of POST will use the Content-Type - application/x-www-form-urlencoded and is the most widely used POST kind. - - The data you send to the server MUST already be properly encoded, curl will - not do that for you. For example, if you want the data to contain a space, - you need to replace that space with %20 etc. Failing to comply with this - will most likely cause your data to be received wrongly and messed up. - - Recent curl versions can in fact url-encode POST data for you, like this: - - curl --data-urlencode "name=I am Daniel" http://www.example.com - - If you repeat --data several times on the command line, curl will - concatenate all the given data pieces - and put a '&' symbol between each - data segment. - - 4.4 File Upload POST - - Back in late 1995 they defined an additional way to post data over HTTP. It - is documented in the RFC 1867, why this method sometimes is referred to as - RFC1867-posting. - - This method is mainly designed to better support file uploads. A form that - allows a user to upload a file could be written like this in HTML: - -
-     <form method="POST" enctype="multipart/form-data" action="upload.cgi">
-       <input type=file name=upload>
-       <input type=submit name=press value="OK">
-     </form>
- - This clearly shows that the Content-Type about to be sent is - multipart/form-data. - - To post to a form like this with curl, you enter a command line like: - - curl --form upload=@localfilename --form press=OK [URL] - - 4.5 Hidden Fields - - A very common way for HTML based applications to pass state information - between pages is to add hidden fields to the forms. Hidden fields are - already filled in, they aren't displayed to the user and they get passed - along just as all the other fields. - - A similar example form with one visible field, one hidden field and one - submit button could look like: - -
-     <form method="POST" action="foobar.cgi">
-       <input type=text name="birthyear">
-       <input type=hidden name="person" value="daniel">
-       <input type=submit name="press" value="OK">
-     </form>
- - To POST this with curl, you won't have to think about if the fields are - hidden or not. To curl they're all the same: - - curl --data "birthyear=1905&press=OK&person=daniel" [URL] - - 4.6 Figure Out What A POST Looks Like - - When you're about fill in a form and send to a server by using curl instead - of a browser, you're of course very interested in sending a POST exactly the - way your browser does. - - An easy way to get to see this, is to save the HTML page with the form on - your local disk, modify the 'method' to a GET, and press the submit button - (you could also change the action URL if you want to). - - You will then clearly see the data get appended to the URL, separated with a - '?'-letter as GET forms are supposed to. - -5. HTTP upload - - 5.1 PUT - - Perhaps the best way to upload data to a HTTP server is to use PUT. Then - again, this of course requires that someone put a program or script on the - server end that knows how to receive a HTTP PUT stream. - - Put a file to a HTTP server with curl: - - curl --upload-file uploadfile http://www.example.com/receive.cgi - -6. HTTP Authentication - - 6.1 Basic Authentication - - HTTP Authentication is the ability to tell the server your username and - password so that it can verify that you're allowed to do the request you're - doing. The Basic authentication used in HTTP (which is the type curl uses by - default) is *plain* *text* based, which means it sends username and password - only slightly obfuscated, but still fully readable by anyone that sniffs on - the network between you and the remote server. - - To tell curl to use a user and password for authentication: - - curl --user name:password http://www.example.com - - 6.2 Other Authentication - - The site might require a different authentication method (check the headers - returned by the server), and then --ntlm, --digest, --negotiate or even - --anyauth might be options that suit you. - - 6.3 Proxy Authentication - - Sometimes your HTTP access is only available through the use of a HTTP - proxy. This seems to be especially common at various companies. A HTTP proxy - may require its own user and password to allow the client to get through to - the Internet. To specify those with curl, run something like: - - curl --proxy-user proxyuser:proxypassword curl.haxx.se - - If your proxy requires the authentication to be done using the NTLM method, - use --proxy-ntlm, if it requires Digest use --proxy-digest. - - If you use any one of these user+password options but leave out the password - part, curl will prompt for the password interactively. - - 6.4 Hiding credentials - - Do note that when a program is run, its parameters might be possible to see - when listing the running processes of the system. Thus, other users may be - able to watch your passwords if you pass them as plain command line - options. There are ways to circumvent this. - - It is worth noting that while this is how HTTP Authentication works, very - many web sites will not use this concept when they provide logins etc. See - the Web Login chapter further below for more details on that. - -7. More HTTP Headers - - 7.1 Referer - - A HTTP request may include a 'referer' field (yes it is misspelled), which - can be used to tell from which URL the client got to this particular - resource. Some programs/scripts check the referer field of requests to verify - that this wasn't arriving from an external site or an unknown page. 
While - this is a stupid way to check something so easily forged, many scripts still - do it. Using curl, you can put anything you want in the referer-field and - thus more easily be able to fool the server into serving your request. - - Use curl to set the referer field with: - - curl --referer http://www.example.come http://www.example.com - - 7.2 User Agent - - Very similar to the referer field, all HTTP requests may set the User-Agent - field. It names what user agent (client) that is being used. Many - applications use this information to decide how to display pages. Silly web - programmers try to make different pages for users of different browsers to - make them look the best possible for their particular browsers. They usually - also do different kinds of javascript, vbscript etc. - - At times, you will see that getting a page with curl will not return the same - page that you see when getting the page with your browser. Then you know it - is time to set the User Agent field to fool the server into thinking you're - one of those browsers. - - To make curl look like Internet Explorer 5 on a Windows 2000 box: - - curl --user-agent "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)" [URL] - - Or why not look like you're using Netscape 4.73 on an old Linux box: - - curl --user-agent "Mozilla/4.73 [en] (X11; U; Linux 2.2.15 i686)" [URL] - -8. Redirects - - 8.1 Location header - - When a resource is requested from a server, the reply from the server may - include a hint about where the browser should go next to find this page, or a - new page keeping newly generated output. The header that tells the browser - to redirect is Location:. - - Curl does not follow Location: headers by default, but will simply display - such pages in the same manner it displays all HTTP replies. It does however - feature an option that will make it attempt to follow the Location: pointers. - - To tell curl to follow a Location: - - curl --location http://www.example.com - - If you use curl to POST to a site that immediately redirects you to another - page, you can safely use --location (-L) and --data/--form together. Curl will - only use POST in the first request, and then revert to GET in the following - operations. - - 8.2 Other redirects - - Browser typically support at least two other ways of redirects that curl - doesn't: first the html may contain a meta refresh tag that asks the browser - to load a specific URL after a set number of seconds, or it may use - javascript to do it. - -9. Cookies - - 9.1 Cookie Basics - - The way the web browsers do "client side state control" is by using - cookies. Cookies are just names with associated contents. The cookies are - sent to the client by the server. The server tells the client for what path - and host name it wants the cookie sent back, and it also sends an expiration - date and a few more properties. - - When a client communicates with a server with a name and path as previously - specified in a received cookie, the client sends back the cookies and their - contents to the server, unless of course they are expired. - - Many applications and servers use this method to connect a series of requests - into a single logical session. To be able to use curl in such occasions, we - must be able to record and send back cookies the way the web application - expects them. The same way browsers deal with them. 
- - 9.2 Cookie options - - The simplest way to send a few cookies to the server when getting a page with - curl is to add them on the command line like: - - curl --cookie "name=Daniel" http://www.example.com - - Cookies are sent as common HTTP headers. This is practical as it allows curl - to record cookies simply by recording headers. Record cookies with curl by - using the --dump-header (-D) option like: - - curl --dump-header headers_and_cookies http://www.example.com - - (Take note that the --cookie-jar option described below is a better way to - store cookies.) - - Curl has a full blown cookie parsing engine built-in that comes in use if you - want to reconnect to a server and use cookies that were stored from a - previous connection (or hand-crafted manually to fool the server into - believing you had a previous connection). To use previously stored cookies, - you run curl like: - - curl --cookie stored_cookies_in_file http://www.example.com - - Curl's "cookie engine" gets enabled when you use the --cookie option. If you - only want curl to understand received cookies, use --cookie with a file that - doesn't exist. Example, if you want to let curl understand cookies from a - page and follow a location (and thus possibly send back cookies it received), - you can invoke it like: - - curl --cookie nada --location http://www.example.com - - Curl has the ability to read and write cookie files that use the same file - format that Netscape and Mozilla once used. It is a convenient way to share - cookies between scripts or invokes. The --cookie (-b) switch automatically - detects if a given file is such a cookie file and parses it, and by using the - --cookie-jar (-c) option you'll make curl write a new cookie file at the end - of an operation: - - curl --cookie cookies.txt --cookie-jar newcookies.txt \ - http://www.example.com - -10. HTTPS - - 10.1 HTTPS is HTTP secure - - There are a few ways to do secure HTTP transfers. By far the most common - protocol for doing this is what is generally known as HTTPS, HTTP over - SSL. SSL encrypts all the data that is sent and received over the network and - thus makes it harder for attackers to spy on sensitive information. - - SSL (or TLS as the latest version of the standard is called) offers a - truckload of advanced features to allow all those encryptions and key - infrastructure mechanisms encrypted HTTP requires. - - Curl supports encrypted fetches when built to use a TLS library and it can be - built to use one out of a fairly large set of libraries - "curl -V" will show - which one your curl was built to use (if any!). To get a page from a HTTPS - server, simply run curl like: - - curl https://secure.example.com - - 10.2 Certificates - - In the HTTPS world, you use certificates to validate that you are the one - you claim to be, as an addition to normal passwords. Curl supports client- - side certificates. All certificates are locked with a pass phrase, which you - need to enter before the certificate can be used by curl. The pass phrase - can be specified on the command line or if not, entered interactively when - curl queries for it. Use a certificate with curl on a HTTPS server like: - - curl --cert mycert.pem https://secure.example.com - - curl also tries to verify that the server is who it claims to be, by - verifying the server's certificate against a locally stored CA cert - bundle. Failing the verification will cause curl to deny the connection. 
You - must then use --insecure (-k) in case you want to tell curl to ignore that - the server can't be verified. - - More about server certificate verification and ca cert bundles can be read - in the SSLCERTS document, available online here: - - https://curl.haxx.se/docs/sslcerts.html - - At times you may end up with your own CA cert store and then you can tell - curl to use that to verify the server's certificate: - - curl --cacert ca-bundle.pem https://example.com/ - - -11. Custom Request Elements - -11.1 Modify method and headers - - Doing fancy stuff, you may need to add or change elements of a single curl - request. - - For example, you can change the POST request to a PROPFIND and send the data - as "Content-Type: text/xml" (instead of the default Content-Type) like this: - - curl --data "" --header "Content-Type: text/xml" \ - --request PROPFIND url.com - - You can delete a default header by providing one without content. Like you - can ruin the request by chopping off the Host: header: - - curl --header "Host:" http://www.example.com - - You can add headers the same way. Your server may want a "Destination:" - header, and you can add it: - - curl --header "Destination: http://nowhere" http://example.com - - 11.2 More on changed methods - - It should be noted that curl selects which methods to use on its own - depending on what action to ask for. -d will do POST, -I will do HEAD and so - on. If you use the --request / -X option you can change the method keyword - curl selects, but you will not modify curl's behavior. This means that if you - for example use -d "data" to do a POST, you can modify the method to a - PROPFIND with -X and curl will still think it sends a POST. You can change - the normal GET to a POST method by simply adding -X POST in a command line - like: - - curl -X POST http://example.org/ - - ... but curl will still think and act as if it sent a GET so it won't send any - request body etc. - - -12. Web Login - - 12.1 Some login tricks - - While not strictly just HTTP related, it still causes a lot of people problems - so here's the executive run-down of how the vast majority of all login forms - work and how to login to them using curl. - - It can also be noted that to do this properly in an automated fashion, you - will most certainly need to script things and do multiple curl invokes etc. - - First, servers mostly use cookies to track the logged-in status of the - client, so you will need to capture the cookies you receive in the - responses. Then, many sites also set a special cookie on the login page (to - make sure you got there through their login page) so you should make a habit - of first getting the login-form page to capture the cookies set there. - - Some web-based login systems feature various amounts of javascript, and - sometimes they use such code to set or modify cookie contents. Possibly they - do that to prevent programmed logins, like this manual describes how to... - Anyway, if reading the code isn't enough to let you repeat the behavior - manually, capturing the HTTP requests done by your browsers and analyzing the - sent cookies is usually a working method to work out how to shortcut the - javascript need. - - In the actual
tag for the login, lots of sites fill-in random/session - or otherwise secretly generated hidden tags and you may need to first capture - the HTML code for the login form and extract all the hidden fields to be able - to do a proper login POST. Remember that the contents need to be URL encoded - when sent in a normal POST. - -13. Debug - - 13.1 Some debug tricks - - Many times when you run curl on a site, you'll notice that the site doesn't - seem to respond the same way to your curl requests as it does to your - browser's. - - Then you need to start making your curl requests more similar to your - browser's requests: - - * Use the --trace-ascii option to store fully detailed logs of the requests - for easier analyzing and better understanding - - * Make sure you check for and use cookies when needed (both reading with - --cookie and writing with --cookie-jar) - - * Set user-agent to one like a recent popular browser does - - * Set referer like it is set by the browser - - * If you use POST, make sure you send all the fields and in the same order as - the browser does it. - - A very good helper to make sure you do this right, is the LiveHTTPHeader tool - that lets you view all headers you send and receive with Mozilla/Firefox - (even when using HTTPS). Chrome features similar functionality out of the box - among the developer's tools. - - A more raw approach is to capture the HTTP traffic on the network with tools - such as ethereal or tcpdump and check what headers that were sent and - received by the browser. (HTTPS makes this technique inefficient.) - -14. References - - 14.1 Standards - - RFC 7230 is a must to read if you want in-depth understanding of the HTTP - protocol - - RFC 3986 explains the URL syntax - - RFC 1867 defines the HTTP post upload format - - RFC 6525 defines how HTTP cookies work - - 14.2 Sites - - https://curl.haxx.se is the home of the curl project diff --git a/docs/TheArtOfHttpScripting.md b/docs/TheArtOfHttpScripting.md new file mode 100644 index 00000000000000..8de00f0a9d8683 --- /dev/null +++ b/docs/TheArtOfHttpScripting.md @@ -0,0 +1,692 @@ +# The Art Of Scripting HTTP Requests Using Curl + +## Background + + This document assumes that you're familiar with HTML and general networking. + + The increasing amount of applications moving to the web has made "HTTP + Scripting" more frequently requested and wanted. To be able to automatically + extract information from the web, to fake users, to post or upload data to + web servers are all important tasks today. + + Curl is a command line tool for doing all sorts of URL manipulations and + transfers, but this particular document will focus on how to use it when + doing HTTP requests for fun and profit. I willl assume that you know how to + invoke `curl --help` or `curl --manual` to get basic information about it. + + Curl is not written to do everything for you. It makes the requests, it gets + the data, it sends data and it retrieves the information. You probably need + to glue everything together using some kind of script language or repeated + manual invokes. + +## The HTTP Protocol + + HTTP is the protocol used to fetch data from web servers. It is a very simple + protocol that is built upon TCP/IP. The protocol also allows information to + get sent to the server from the client using a few different methods, as will + be shown here. 
+ + HTTP is plain ASCII text lines being sent by the client to a server to + request a particular action, and then the server replies a few text lines + before the actual requested content is sent to the client. + + The client, curl, sends a HTTP request. The request contains a method (like + GET, POST, HEAD etc), a number of request headers and sometimes a request + body. The HTTP server responds with a status line (indicating if things went + well), response headers and most often also a response body. The "body" part + is the plain data you requested, like the actual HTML or the image etc. + +## See the Protocol + + Using curl's option [`--verbose`](https://curl.haxx.se/docs/manpage.html#-v) + (`-v` as a short option) will display what kind of commands curl sends to the + server, as well as a few other informational texts. + + `--verbose` is the single most useful option when it comes to debug or even + understand the curl<->server interaction. + + Sometimes even `--verbose` is not enough. Then + [`--trace`](https://curl.haxx.se/docs/manpage.html#-trace) and + [`--trace-ascii`]((https://curl.haxx.se/docs/manpage.html#--trace-ascii) + offer even more details as they show **everything** curl sends and + receives. Use it like this: + + curl --trace-ascii debugdump.txt http://www.example.com/ + +## See the Timing + + Many times you may wonder what exactly is taking all the time, or you just + want to know the amount of milliseconds between two points in a transfer. For + those, and other similar situations, the + [`--trace-time`]((https://curl.haxx.se/docs/manpage.html#--trace-time) option + is what you need. It'll prepend the time to each trace output line: + + curl --trace-ascii d.txt --trace-time http://example.com/ + +## See the Response + + By default curl sends the response to stdout. You need to redirect it + somewhere to avoid that, most often that is done with ` -o` or `-O`. + +# URL + +## Spec + + The Uniform Resource Locator format is how you specify the address of a + particular resource on the Internet. You know these, you've seen URLs like + https://curl.haxx.se or https://yourbank.com a million times. RFC 3986 is the + canonical spec. And yeah, the formal name is not URL, it is URI. + +## Host + + The host name is usually resolved using DNS or your /etc/hosts file to an IP + address and that's what curl will communicate with. Alternatively you specify + the IP address directly in the URL instead of a name. + + For development and other trying out situations, you can point to a different + IP address for a host name than what would otherwise be used, by using curl's + [`--resolve`](https://curl.haxx.se/docs/manpage.html#--resolve) option: + + curl --resolve www.example.org:80:127.0.0.1 http://www.example.org/ + +## Port number + + Each protocol curl supports operates on a default port number, be it over TCP + or in some cases UDP. Normally you don't have to take that into + consideration, but at times you run test servers on other ports or + similar. Then you can specify the port number in the URL with a colon and a + number immediately following the host name. Like when doing HTTP to port + 1234: + + curl http://www.example.org:1234/ + + The port number you specify in the URL is the number that the server uses to + offer its services. Sometimes you may use a local proxy, and then you may + need to specify that proxy's port number separately for what curl needs to + connect to locally. 
Like when using a HTTP proxy on port 4321: + + curl --proxy http://proxy.example.org:4321 http://remote.example.org/ + +## User name and password + + Some services are setup to require HTTP authentication and then you need to + provide name and password which is then transferred to the remote site in + various ways depending on the exact authentication protocol used. + + You can opt to either insert the user and password in the URL or you can + provide them separately: + + curl http://user:password@example.org/ + + or + + curl -u user:password http://example.org/ + + You need to pay attention that this kind of HTTP authentication is not what + is usually done and requested by user-oriented websites these days. They tend + to use forms and cookies instead. + +## Path part + + The path part is just sent off to the server to request that it sends back + the associated response. The path is what is to the right side of the slash + that follows the host name and possibly port number. + +# Fetch a page + +## GET + + The simplest and most common request/operation made using HTTP is to GET a + URL. The URL could itself refer to a web page, an image or a file. The client + issues a GET request to the server and receives the document it asked for. + If you issue the command line + + curl https://curl.haxx.se + + you get a web page returned in your terminal window. The entire HTML document + that that URL holds. + + All HTTP replies contain a set of response headers that are normally hidden, + use curl's [`--include`](https://curl.haxx.se/docs/manpage.html#-i) (`-i`) + option to display them as well as the rest of the document. + +## HEAD + + You can ask the remote server for ONLY the headers by using the + [`--head`](https://curl.haxx.se/docs/manpage.html#-I) (`-I`) option which + will make curl issue a HEAD request. In some special cases servers deny the + HEAD method while others still work, which is a particular kind of annoyance. + + The HEAD method is defined and made so that the server returns the headers + exactly the way it would do for a GET, but without a body. It means that you + may see a `Content-Length:` in the response headers, but there must not be an + actual body in the HEAD response. + +## Multiple URLs in a single command line + + A single curl command line may involve one or many URLs. The most common case + is probably to just use one, but you can specify any amount of URLs. Yes + any. No limits. You'll then get requests repeated over and over for all the + given URLs. + + Example, send two GETs: + + curl http://url1.example.com http://url2.example.com + + If you use [`--data`](https://curl.haxx.se/docs/manpage.html#-d) to POST to + the URL, using multiple URLs means that you send that same POST to all the + given URLs. + + Example, send two POSTs: + + curl --data name=curl http://url1.example.com http://url2.example.com + + +## Multiple HTTP methods in a single command line + + Sometimes you need to operate on several URLs in a single command line and do + different HTTP methods on each. For this, you'll enjoy the + [`--next`](https://curl.haxx.se/docs/manpage.html#-:) option. It is basically + a separator that separates a bunch of options from the next. All the URLs + before `--next` will get the same method and will get all the POST data + merged into one. + + When curl reaches the `--next` on the command line, it'll sort of reset the + method and the POST data and allow a new set. + + Perhaps this is best shown with a few examples. 
To send first a HEAD and then
+ a GET:
+
+    curl -I http://example.com --next http://example.com
+
+ To first send a POST and then a GET:
+
+    curl -d score=10 http://example.com/post.cgi --next http://example.com/results.html
+
+# HTML forms
+
+## Forms explained
+
+ Forms are the general way a website can present a HTML page with fields for
+ the user to enter data in, and then press some kind of 'OK' or 'Submit'
+ button to get that data sent to the server. The server then typically uses
+ the posted data to decide how to act. Like using the entered words to search
+ in a database, or to add the info in a bug tracking system, display the
+ entered address on a map or using the info as a login-prompt verifying that
+ the user is allowed to see what it is about to see.
+
+ Of course there has to be some kind of program on the server end to receive
+ the data you send. You cannot just invent something out of the air.
+
+## GET
+
+ A GET-form uses the method GET, as specified in HTML like:
+
+    <form method="GET" action="junk.cgi">
+      <input type=text name="birthyear">
+      <input type=submit name=press value="OK">
+    </form>
+ + In your favorite browser, this form will appear with a text box to fill in + and a press-button labeled "OK". If you fill in '1905' and press the OK + button, your browser will then create a new URL to get for you. The URL will + get `junk.cgi?birthyear=1905&press=OK` appended to the path part of the + previous URL. + + If the original form was seen on the page `www.example.com/when/birth.html`, + the second page you'll get will become + `www.example.com/when/junk.cgi?birthyear=1905&press=OK`. + + Most search engines work this way. + + To make curl do the GET form post for you, just enter the expected created + URL: + + curl "http://www.example.com/when/junk.cgi?birthyear=1905&press=OK" + +## POST + + The GET method makes all input field names get displayed in the URL field of + your browser. That's generally a good thing when you want to be able to + bookmark that page with your given data, but it is an obvious disadvantage if + you entered secret information in one of the fields or if there are a large + amount of fields creating a very long and unreadable URL. + + The HTTP protocol then offers the POST method. This way the client sends the + data separated from the URL and thus you won't see any of it in the URL + address field. + + The form would look very similar to the previous one: + +
+    <form method="POST" action="junk.cgi">
+      <input type=text name="birthyear">
+      <input type=submit name=press value=" OK ">
+    </form>
+
+ And to use curl to post this form with the same data filled in as before, we
+ could do it like:
+
+    curl --data "birthyear=1905&press=%20OK%20" http://www.example.com/when.cgi
+
+ This kind of POST will use the Content-Type
+ `application/x-www-form-urlencoded` and is the most widely used POST kind.
+
+ The data you send to the server MUST already be properly encoded, curl will
+ not do that for you. For example, if you want the data to contain a space,
+ you need to replace that space with %20 etc. Failing to comply with this will
+ most likely cause your data to be received wrongly and messed up.
+
+ Recent curl versions can in fact url-encode POST data for you, like this:
+
+    curl --data-urlencode "name=I am Daniel" http://www.example.com
+
+ If you repeat `--data` several times on the command line, curl will
+ concatenate all the given data pieces - and put a `&` symbol between each
+ data segment.
+
+## File Upload POST
+
+ Back in late 1995 they defined an additional way to post data over HTTP. It
+ is documented in RFC 1867, which is why this method sometimes is referred to
+ as RFC1867-posting.
+
+ This method is mainly designed to better support file uploads. A form that
+ allows a user to upload a file could be written like this in HTML:
+
+    <form method="POST" enctype='multipart/form-data' action="upload.cgi">
+      <input type=file name=upload>
+      <input type=submit name=press value="OK">
+    </form>
+ + This clearly shows that the Content-Type about to be sent is + `multipart/form-data`. + + To post to a form like this with curl, you enter a command line like: + + curl --form upload=@localfilename --form press=OK [URL] + +## Hidden Fields + + A very common way for HTML based applications to pass state information + between pages is to add hidden fields to the forms. Hidden fields are already + filled in, they aren't displayed to the user and they get passed along just + as all the other fields. + + A similar example form with one visible field, one hidden field and one + submit button could look like: + +
+    <form method="POST" action="foobar.cgi">
+      <input type=text name="birthyear">
+      <input type=hidden name="person" value="daniel">
+      <input type=submit name="press" value="OK">
+    </form>
+ + To POST this with curl, you won't have to think about if the fields are + hidden or not. To curl they're all the same: + + curl --data "birthyear=1905&press=OK&person=daniel" [URL] + +## Figure Out What A POST Looks Like + + When you're about fill in a form and send to a server by using curl instead + of a browser, you're of course very interested in sending a POST exactly the + way your browser does. + + An easy way to get to see this, is to save the HTML page with the form on + your local disk, modify the 'method' to a GET, and press the submit button + (you could also change the action URL if you want to). + + You will then clearly see the data get appended to the URL, separated with a + `?`-letter as GET forms are supposed to. + +# HTTP upload + +## PUT + + Perhaps the best way to upload data to a HTTP server is to use PUT. Then + again, this of course requires that someone put a program or script on the + server end that knows how to receive a HTTP PUT stream. + + Put a file to a HTTP server with curl: + + curl --upload-file uploadfile http://www.example.com/receive.cgi + +# HTTP Authentication + +## Basic Authentication + + HTTP Authentication is the ability to tell the server your username and + password so that it can verify that you're allowed to do the request you're + doing. The Basic authentication used in HTTP (which is the type curl uses by + default) is **plain text** based, which means it sends username and password + only slightly obfuscated, but still fully readable by anyone that sniffs on + the network between you and the remote server. + + To tell curl to use a user and password for authentication: + + curl --user name:password http://www.example.com + +## Other Authentication + + The site might require a different authentication method (check the headers + returned by the server), and then + [`--ntlm`](https://curl.haxx.se/docs/manpage.html#--ntlm), + [`--digest`](https://curl.haxx.se/docs/manpage.html#--digest), + [`--negotiate`](https://curl.haxx.se/docs/manpage.html#--negotiate) or even + [`--anyauth`](https://curl.haxx.se/docs/manpage.html#--anyauth) might be + options that suit you. + +## Proxy Authentication + + Sometimes your HTTP access is only available through the use of a HTTP + proxy. This seems to be especially common at various companies. A HTTP proxy + may require its own user and password to allow the client to get through to + the Internet. To specify those with curl, run something like: + + curl --proxy-user proxyuser:proxypassword curl.haxx.se + + If your proxy requires the authentication to be done using the NTLM method, + use [`--proxy-ntlm`](https://curl.haxx.se/docs/manpage.html#--proxy-ntlm), if + it requires Digest use + [`--proxy-digest`](https://curl.haxx.se/docs/manpage.html#--proxy-digest). + + If you use any one of these user+password options but leave out the password + part, curl will prompt for the password interactively. + +## Hiding credentials + + Do note that when a program is run, its parameters might be possible to see + when listing the running processes of the system. Thus, other users may be + able to watch your passwords if you pass them as plain command line + options. There are ways to circumvent this. + + It is worth noting that while this is how HTTP Authentication works, very + many websites will not use this concept when they provide logins etc. See the + Web Login chapter further below for more details on that. 
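+
+ One way to keep the credentials off the command line (a minimal sketch; the
+ file name ./creds is just an example) is to store them in a file that only
+ you can read, and pass that file to curl with `--config` (`-K`):
+
+    # contents of ./creds - keep this file readable only by you
+    user = "daniel:secret"
+
+ and then invoke curl like:
+
+    curl --config ./creds http://www.example.com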
+
+# More HTTP Headers
+
+## Referer
+
+ A HTTP request may include a 'referer' field (yes it is misspelled), which
+ can be used to tell from which URL the client got to this particular
+ resource. Some programs/scripts check the referer field of requests to verify
+ that this wasn't arriving from an external site or an unknown page. While
+ this is a stupid way to check something so easily forged, many scripts still
+ do it. Using curl, you can put anything you want in the referer-field and
+ thus more easily be able to fool the server into serving your request.
+
+ Use curl to set the referer field with:
+
+    curl --referer http://www.example.com http://www.example.com
+
+## User Agent
+
+ Very similar to the referer field, all HTTP requests may set the User-Agent
+ field. It names what user agent (client) is being used. Many
+ applications use this information to decide how to display pages. Silly web
+ programmers try to make different pages for users of different browsers to
+ make them look the best possible for their particular browsers. They usually
+ also do different kinds of javascript, vbscript etc.
+
+ At times, you will see that getting a page with curl will not return the same
+ page that you see when getting the page with your browser. Then you know it
+ is time to set the User Agent field to fool the server into thinking you're
+ one of those browsers.
+
+ To make curl look like Internet Explorer 5 on a Windows 2000 box:
+
+    curl --user-agent "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)" [URL]
+
+ Or why not look like you're using Netscape 4.73 on an old Linux box:
+
+    curl --user-agent "Mozilla/4.73 [en] (X11; U; Linux 2.2.15 i686)" [URL]
+
+# Redirects
+
+## Location header
+
+ When a resource is requested from a server, the reply from the server may
+ include a hint about where the browser should go next to find this page, or a
+ new page keeping newly generated output. The header that tells the browser to
+ redirect is `Location:`.
+
+ Curl does not follow `Location:` headers by default, but will simply display
+ such pages in the same manner it displays all HTTP replies. It does however
+ feature an option that will make it attempt to follow the `Location:`
+ pointers.
+
+ To tell curl to follow a Location:
+
+    curl --location http://www.example.com
+
+ If you use curl to POST to a site that immediately redirects you to another
+ page, you can safely use
+ [`--location`](https://curl.haxx.se/docs/manpage.html#-L) (`-L`) and
+ `--data`/`--form` together. curl will only use POST in the first request, and
+ then revert to GET in the following operations.
+
+## Other redirects
+
+ Browsers typically support at least two other ways of redirects that curl
+ doesn't: first the html may contain a meta refresh tag that asks the browser
+ to load a specific URL after a set number of seconds, or it may use
+ javascript to do it.
+
+# Cookies
+
+## Cookie Basics
+
+ The way the web browsers do "client side state control" is by using
+ cookies. Cookies are just names with associated contents. The cookies are
+ sent to the client by the server. The server tells the client for what path
+ and host name it wants the cookie sent back, and it also sends an expiration
+ date and a few more properties.
+
+ When a client communicates with a server with a name and path as previously
+ specified in a received cookie, the client sends back the cookies and their
+ contents to the server, unless of course they are expired.
+ + Many applications and servers use this method to connect a series of requests + into a single logical session. To be able to use curl in such occasions, we + must be able to record and send back cookies the way the web application + expects them. The same way browsers deal with them. + +## Cookie options + + The simplest way to send a few cookies to the server when getting a page with + curl is to add them on the command line like: + + curl --cookie "name=Daniel" http://www.example.com + + Cookies are sent as common HTTP headers. This is practical as it allows curl + to record cookies simply by recording headers. Record cookies with curl by + using the [`--dump-header`](https://curl.haxx.se/docs/manpage.html#-D) (`-D`) + option like: + + curl --dump-header headers_and_cookies http://www.example.com + + (Take note that the + [`--cookie-jar`](https://curl.haxx.se/docs/manpage.html#-c) option described + below is a better way to store cookies.) + + Curl has a full blown cookie parsing engine built-in that comes in use if you + want to reconnect to a server and use cookies that were stored from a + previous connection (or hand-crafted manually to fool the server into + believing you had a previous connection). To use previously stored cookies, + you run curl like: + + curl --cookie stored_cookies_in_file http://www.example.com + + Curl's "cookie engine" gets enabled when you use the + [`--cookie`](https://curl.haxx.se/docs/manpage.html#-b) option. If you only + want curl to understand received cookies, use `--cookie` with a file that + doesn't exist. Example, if you want to let curl understand cookies from a + page and follow a location (and thus possibly send back cookies it received), + you can invoke it like: + + curl --cookie nada --location http://www.example.com + + Curl has the ability to read and write cookie files that use the same file + format that Netscape and Mozilla once used. It is a convenient way to share + cookies between scripts or invokes. The `--cookie` (`-b`) switch + automatically detects if a given file is such a cookie file and parses it, + and by using the `--cookie-jar` (`-c`) option you'll make curl write a new + cookie file at the end of an operation: + + curl --cookie cookies.txt --cookie-jar newcookies.txt \ + http://www.example.com + +# HTTPS + +## HTTPS is HTTP secure + + There are a few ways to do secure HTTP transfers. By far the most common + protocol for doing this is what is generally known as HTTPS, HTTP over + SSL. SSL encrypts all the data that is sent and received over the network and + thus makes it harder for attackers to spy on sensitive information. + + SSL (or TLS as the latest version of the standard is called) offers a + truckload of advanced features to allow all those encryptions and key + infrastructure mechanisms encrypted HTTP requires. + + Curl supports encrypted fetches when built to use a TLS library and it can be + built to use one out of a fairly large set of libraries - `curl -V` will show + which one your curl was built to use (if any!). To get a page from a HTTPS + server, simply run curl like: + + curl https://secure.example.com + +## Certificates + + In the HTTPS world, you use certificates to validate that you are the one + you claim to be, as an addition to normal passwords. Curl supports client- + side certificates. All certificates are locked with a pass phrase, which you + need to enter before the certificate can be used by curl. 
The pass phrase + can be specified on the command line or if not, entered interactively when + curl queries for it. Use a certificate with curl on a HTTPS server like: + + curl --cert mycert.pem https://secure.example.com + + curl also tries to verify that the server is who it claims to be, by + verifying the server's certificate against a locally stored CA cert + bundle. Failing the verification will cause curl to deny the connection. You + must then use [`--insecure`](https://curl.haxx.se/docs/manpage.html#-k) + (`-k`) in case you want to tell curl to ignore that the server can't be + verified. + + More about server certificate verification and ca cert bundles can be read in + the [SSLCERTS document](https://curl.haxx.se/docs/sslcerts.html). + + At times you may end up with your own CA cert store and then you can tell + curl to use that to verify the server's certificate: + + curl --cacert ca-bundle.pem https://example.com/ + +# Custom Request Elements + +## Modify method and headers + + Doing fancy stuff, you may need to add or change elements of a single curl + request. + + For example, you can change the POST request to a PROPFIND and send the data + as `Content-Type: text/xml` (instead of the default Content-Type) like this: + + curl --data "" --header "Content-Type: text/xml" \ + --request PROPFIND example.com + + You can delete a default header by providing one without content. Like you + can ruin the request by chopping off the Host: header: + + curl --header "Host:" http://www.example.com + + You can add headers the same way. Your server may want a `Destination:` + header, and you can add it: + + curl --header "Destination: http://nowhere" http://example.com + +## More on changed methods + + It should be noted that curl selects which methods to use on its own + depending on what action to ask for. `-d` will do POST, `-I` will do HEAD and + so on. If you use the + [`--request`](https://curl.haxx.se/docs/manpage.html#-X) / `-X` option you + can change the method keyword curl selects, but you will not modify curl's + behavior. This means that if you for example use -d "data" to do a POST, you + can modify the method to a `PROPFIND` with `-X` and curl will still think it + sends a POST . You can change the normal GET to a POST method by simply + adding `-X POST` in a command line like: + + curl -X POST http://example.org/ + + ... but curl will still think and act as if it sent a GET so it won't send + any request body etc. + +# Web Login + +## Some login tricks + + While not strictly just HTTP related, it still causes a lot of people + problems so here's the executive run-down of how the vast majority of all + login forms work and how to login to them using curl. + + It can also be noted that to do this properly in an automated fashion, you + will most certainly need to script things and do multiple curl invokes etc. + + First, servers mostly use cookies to track the logged-in status of the + client, so you will need to capture the cookies you receive in the + responses. Then, many sites also set a special cookie on the login page (to + make sure you got there through their login page) so you should make a habit + of first getting the login-form page to capture the cookies set there. + + Some web-based login systems feature various amounts of javascript, and + sometimes they use such code to set or modify cookie contents. Possibly they + do that to prevent programmed logins, like this manual describes how to... 
+ Anyway, if reading the code isn't enough to let you repeat the behavior
+ manually, capturing the HTTP requests done by your browsers and analyzing the
+ sent cookies is usually a working method to work out how to shortcut the
+ javascript need.
+
+ In the actual `<form>
` tag for the login, lots of sites fill-in + random/session or otherwise secretly generated hidden tags and you may need + to first capture the HTML code for the login form and extract all the hidden + fields to be able to do a proper login POST. Remember that the contents need + to be URL encoded when sent in a normal POST. + +# Debug + +## Some debug tricks + + Many times when you run curl on a site, you'll notice that the site doesn't + seem to respond the same way to your curl requests as it does to your + browser's. + + Then you need to start making your curl requests more similar to your + browser's requests: + + - Use the `--trace-ascii` option to store fully detailed logs of the requests + for easier analyzing and better understanding + + - Make sure you check for and use cookies when needed (both reading with + `--cookie` and writing with `--cookie-jar`) + + - Set user-agent (with [`-A`](https://curl.haxx.se/docs/manpage.html#-A)) to + one like a recent popular browser does + + - Set referer (with [`-E`](https://curl.haxx.se/docs/manpage.html#-E)) like + it is set by the browser + + - If you use POST, make sure you send all the fields and in the same order as + the browser does it. + +## Check what the browsers do + + A very good helper to make sure you do this right, is the web browsers' + developers tools that let you view all headers you send and receive (even + when using HTTPS). + + A more raw approach is to capture the HTTP traffic on the network with tools + such as Wireshark or tcpdump and check what headers that were sent and + received by the browser. (HTTPS forces you to use `SSLKEYLOGFILE` to do + that.) diff --git a/docs/VERSIONS b/docs/VERSIONS.md similarity index 100% rename from docs/VERSIONS rename to docs/VERSIONS.md diff --git a/docs/cmdline-opts/CMakeLists.txt b/docs/cmdline-opts/CMakeLists.txt index 3c020d418df583..a63f4eddf4b8dc 100644 --- a/docs/cmdline-opts/CMakeLists.txt +++ b/docs/cmdline-opts/CMakeLists.txt @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### set(MANPAGE "${CURL_BINARY_DIR}/docs/curl.1") # Load DPAGES and OTHERPAGES from shared file diff --git a/docs/cmdline-opts/Makefile.am b/docs/cmdline-opts/Makefile.am index e6ecf7a6b0c73d..0c81b623b2cc01 100644 --- a/docs/cmdline-opts/Makefile.am +++ b/docs/cmdline-opts/Makefile.am @@ -5,7 +5,7 @@ # | (__| |_| | _ <| |___ # \___|\___/|_| \_\_____| # -# Copyright (C) 1998 - 2017, Daniel Stenberg, , et al. +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. 
The terms @@ -31,4 +31,5 @@ EXTRA_DIST = $(DPAGES) MANPAGE.md gen.pl $(OTHERPAGES) CMakeLists.txt all: $(MANPAGE) $(MANPAGE): $(DPAGES) $(OTHERPAGES) Makefile.inc - @PERL@ $(srcdir)/gen.pl mainpage $(srcdir) > $(MANPAGE) + @echo "generate $(MANPAGE)" + @(cd $(srcdir) && @PERL@ ./gen.pl mainpage $(DPAGES)) > $(MANPAGE) diff --git a/docs/cmdline-opts/Makefile.inc b/docs/cmdline-opts/Makefile.inc index 7a8af6f9ee802a..792cadb3c3ccc5 100644 --- a/docs/cmdline-opts/Makefile.inc +++ b/docs/cmdline-opts/Makefile.inc @@ -1,3 +1,24 @@ +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. +# +########################################################################### # Shared between Makefile.am and CMakeLists.txt DPAGES = \ @@ -20,6 +41,7 @@ DPAGES = \ cookie.d \ create-dirs.d \ crlf.d crlfile.d \ + curves.d \ data-ascii.d \ data-binary.d \ data-urlencode.d \ @@ -38,6 +60,8 @@ DPAGES = \ dump-header.d \ egd-file.d \ engine.d \ + etag-compare.d \ + etag-save.d \ expect100-timeout.d \ fail-early.d \ fail.d \ @@ -65,6 +89,7 @@ DPAGES = \ http1.0.d \ http1.1.d http2.d \ http2-prior-knowledge.d \ + http3.d \ ignore-content-length.d \ include.d \ insecure.d \ @@ -82,6 +107,7 @@ DPAGES = \ login-options.d \ mail-auth.d \ mail-from.d \ + mail-rcpt-allowfails.d \ mail-rcpt.d \ manual.d \ max-filesize.d \ @@ -96,11 +122,17 @@ DPAGES = \ no-buffer.d \ no-keepalive.d \ no-npn.d \ + no-progress-meter.d \ no-sessionid.d \ noproxy.d \ ntlm.d ntlm-wb.d \ oauth2-bearer.d \ - output.d pass.d \ + output.d \ + output-dir.d \ + parallel-immediate.d \ + parallel-max.d \ + parallel.d \ + pass.d \ path-as-is.d \ pinnedpubkey.d \ post301.d \ @@ -150,10 +182,12 @@ DPAGES = \ request-target.d \ request.d \ resolve.d \ + retry-all-errors.d \ retry-connrefused.d \ retry-delay.d \ retry-max-time.d \ retry.d \ + sasl-authzid.d \ sasl-ir.d \ service-name.d \ show-error.d \ @@ -170,6 +204,7 @@ DPAGES = \ ssl-allow-beast.d \ ssl-no-revoke.d \ ssl-reqd.d \ + ssl-revoke-best-effort.d \ ssl.d \ sslv2.d sslv3.d \ stderr.d \ diff --git a/docs/cmdline-opts/abstract-unix-socket.d b/docs/cmdline-opts/abstract-unix-socket.d index 1fda4e5df3ec91..8403cab9d15639 100644 --- a/docs/cmdline-opts/abstract-unix-socket.d +++ b/docs/cmdline-opts/abstract-unix-socket.d @@ -3,6 +3,7 @@ Arg: Help: Connect via abstract Unix domain socket Added: 7.53.0 Protocols: HTTP +Category: connection --- Connect through an abstract Unix domain socket, instead of using the network. 
Note: netstat shows the path of an abstract socket prefixed with '@', however diff --git a/docs/cmdline-opts/alt-svc.d b/docs/cmdline-opts/alt-svc.d index ba2ded11cd48dd..5598b59b48076c 100644 --- a/docs/cmdline-opts/alt-svc.d +++ b/docs/cmdline-opts/alt-svc.d @@ -3,8 +3,9 @@ Arg: Protocols: HTTPS Help: Enable alt-svc with this cache file Added: 7.64.1 +Category: http --- -WARNING: this option is experiemental. Do not use in production. +WARNING: this option is experimental. Do not use in production. This option enables the alt-svc parser in curl. If the file name points to an existing alt-svc cache file, that will be used. After a completed transfer, @@ -14,4 +15,4 @@ Specify a "" file name (zero length) to avoid loading/saving and make curl just handle the cache in memory. If this option is used several times, curl will load contents from all the -files but the the last one will be used for saving. +files but the last one will be used for saving. diff --git a/docs/cmdline-opts/anyauth.d b/docs/cmdline-opts/anyauth.d index c32d1ed5efd14c..bec0c0c861255c 100644 --- a/docs/cmdline-opts/anyauth.d +++ b/docs/cmdline-opts/anyauth.d @@ -2,6 +2,7 @@ Long: anyauth Help: Pick any authentication method Protocols: HTTP See-also: proxy-anyauth basic digest +Category: http proxy auth --- Tells curl to figure out authentication method by itself, and use the most secure one the remote site claims to support. This is done by first doing a diff --git a/docs/cmdline-opts/append.d b/docs/cmdline-opts/append.d index f001b1239d1d08..8bdb2594d45efc 100644 --- a/docs/cmdline-opts/append.d +++ b/docs/cmdline-opts/append.d @@ -2,6 +2,7 @@ Short: a Long: append Help: Append to target file when uploading Protocols: FTP SFTP +Category: ftp sftp --- When used in an upload, this makes curl append to the target file instead of overwriting it. If the remote file doesn't exist, it will be created. Note diff --git a/docs/cmdline-opts/basic.d b/docs/cmdline-opts/basic.d index 09d42af9d4ad8e..e222c51c1ce6ce 100644 --- a/docs/cmdline-opts/basic.d +++ b/docs/cmdline-opts/basic.d @@ -2,6 +2,7 @@ Long: basic Help: Use HTTP Basic Authentication See-also: proxy-basic Protocols: HTTP +Category: auth --- Tells curl to use HTTP Basic authentication with the remote host. This is the default and this option is usually pointless, unless you use it to override a diff --git a/docs/cmdline-opts/cacert.d b/docs/cmdline-opts/cacert.d index 6a567875220f9a..793d8c1ba176e0 100644 --- a/docs/cmdline-opts/cacert.d +++ b/docs/cmdline-opts/cacert.d @@ -2,6 +2,7 @@ Long: cacert Arg: Help: CA certificate to verify peer against Protocols: TLS +Category: tls --- Tells curl to use the specified certificate file to verify the peer. The file may contain multiple CA certificates. The certificate(s) must be in PEM diff --git a/docs/cmdline-opts/capath.d b/docs/cmdline-opts/capath.d index 0763f7a0d43fa8..4ce077a1471e50 100644 --- a/docs/cmdline-opts/capath.d +++ b/docs/cmdline-opts/capath.d @@ -2,6 +2,7 @@ Long: capath Arg: Help: CA directory to verify peer against Protocols: TLS +Category: tls --- Tells curl to use the specified certificate directory to verify the peer. Multiple paths can be provided by separating them with ":" (e.g. 
diff --git a/docs/cmdline-opts/cert-status.d b/docs/cmdline-opts/cert-status.d index f1aaa21744bf55..1342b092b8551a 100644 --- a/docs/cmdline-opts/cert-status.d +++ b/docs/cmdline-opts/cert-status.d @@ -2,6 +2,7 @@ Long: cert-status Protocols: TLS Added: 7.41.0 Help: Verify the status of the server certificate +Category: tls --- Tells curl to verify the status of the server certificate by using the Certificate Status Request (aka. OCSP stapling) TLS extension. diff --git a/docs/cmdline-opts/cert-type.d b/docs/cmdline-opts/cert-type.d index 55d8033b457138..b041bcb065134d 100644 --- a/docs/cmdline-opts/cert-type.d +++ b/docs/cmdline-opts/cert-type.d @@ -1,8 +1,9 @@ Long: cert-type Protocols: TLS Arg: -Help: Certificate file type +Help: Certificate type (DER/PEM/ENG) See-also: cert key key-type +Category: tls --- Tells curl what type the provided client certificate is using. PEM, DER, ENG and P12 are recognized types. If not specified, PEM is assumed. diff --git a/docs/cmdline-opts/cert.d b/docs/cmdline-opts/cert.d index de6b42060f9273..2938e571fb467f 100644 --- a/docs/cmdline-opts/cert.d +++ b/docs/cmdline-opts/cert.d @@ -4,6 +4,7 @@ Arg: Help: Client certificate file and password Protocols: TLS See-also: cert-type key key-type +Category: tls --- Tells curl to use the specified client certificate file when getting a file with HTTPS, FTPS or another SSL-based protocol. The certificate must be in diff --git a/docs/cmdline-opts/ciphers.d b/docs/cmdline-opts/ciphers.d index 69e85525a540f5..ed7438a0018e28 100644 --- a/docs/cmdline-opts/ciphers.d +++ b/docs/cmdline-opts/ciphers.d @@ -2,6 +2,7 @@ Long: ciphers Arg: Help: SSL ciphers to use Protocols: TLS +Category: tls --- Specifies which ciphers to use in the connection. The list of ciphers must specify valid ciphers. Read up on SSL cipher list details on this URL: diff --git a/docs/cmdline-opts/compressed-ssh.d b/docs/cmdline-opts/compressed-ssh.d index 583452ae47e50d..849fe18b0007a9 100644 --- a/docs/cmdline-opts/compressed-ssh.d +++ b/docs/cmdline-opts/compressed-ssh.d @@ -2,6 +2,7 @@ Long: compressed-ssh Help: Enable SSH compression Protocols: SCP SFTP Added: 7.56.0 +Category: scp ssh --- Enables built-in SSH compression. This is a request, not an order; the server may or may not do it. diff --git a/docs/cmdline-opts/compressed.d b/docs/cmdline-opts/compressed.d index dc130c1f02ea46..26bc1514a55ee9 100644 --- a/docs/cmdline-opts/compressed.d +++ b/docs/cmdline-opts/compressed.d @@ -1,7 +1,10 @@ Long: compressed Help: Request compressed response Protocols: HTTP +Category: http --- Request a compressed response using one of the algorithms curl supports, and -save the uncompressed document. If this option is used and the server sends -an unsupported encoding, curl will report an error. +automatically decompress the content. Headers are not modified. + +If this option is used and the server sends an unsupported encoding, curl will +report an error. diff --git a/docs/cmdline-opts/config.d b/docs/cmdline-opts/config.d index ef9894b8e18302..26346799791144 100644 --- a/docs/cmdline-opts/config.d +++ b/docs/cmdline-opts/config.d @@ -2,6 +2,7 @@ Long: config Arg: Help: Read config from a file Short: K +Category: curl --- Specify a text file to read curl arguments from. The command line arguments @@ -34,13 +35,19 @@ When curl is invoked, it (unless --disable is used) checks for a default config file and uses it if found. 
The default config file is checked for in the following places in this order: -1) curl tries to find the "home dir": It first checks for the CURL_HOME and -then the HOME environment variables. Failing that, it uses getpwuid() on -Unix-like systems (which returns the home dir given the current user in your -system). On Windows, it then checks for the APPDATA variable, or as a last -resort the '%USERPROFILE%\\Application Data'. +1) Use the CURL_HOME environment variable if set -2) On windows, if there is no _curlrc file in the home dir, it checks for one +2) Use the XDG_CONFIG_HOME environment variable if set (Added in 7.73.0) + +3) Use the HOME environment variable if set + +4) Non-windows: use getpwuid to find the home directory + +5) Windows: use APPDATA if set + +6) Windows: use "USERPROFILE\Application Data" if set + +7) On windows, if there is no .curlrc file in the home dir, it checks for one in the same dir the curl executable is placed. On Unix-like systems, it will simply try to load .curlrc from the determined home dir. diff --git a/docs/cmdline-opts/connect-timeout.d b/docs/cmdline-opts/connect-timeout.d index 3a32d86853cec3..fa3277221a5d27 100644 --- a/docs/cmdline-opts/connect-timeout.d +++ b/docs/cmdline-opts/connect-timeout.d @@ -2,6 +2,7 @@ Long: connect-timeout Arg: Help: Maximum time allowed for connection See-also: max-time +Category: connection --- Maximum time in seconds that you allow curl's connection to take. This only limits the connection phase, so if curl connects within the given period it diff --git a/docs/cmdline-opts/connect-to.d b/docs/cmdline-opts/connect-to.d index 458bfe855fc160..2f015f8ac45ed8 100644 --- a/docs/cmdline-opts/connect-to.d +++ b/docs/cmdline-opts/connect-to.d @@ -3,6 +3,7 @@ Arg: Help: Connect to host Added: 7.49.0 See-also: resolve header +Category: connection --- For a request to the given HOST1:PORT1 pair, connect to HOST2:PORT2 instead. diff --git a/docs/cmdline-opts/continue-at.d b/docs/cmdline-opts/continue-at.d index 733f4941ea058f..c44d2f496fc4f5 100644 --- a/docs/cmdline-opts/continue-at.d +++ b/docs/cmdline-opts/continue-at.d @@ -3,6 +3,7 @@ Long: continue-at Arg: Help: Resumed transfer offset See-also: range +Category: connection --- Continue/Resume a previous file transfer at the given offset. The given offset is the exact number of bytes that will be skipped, counting from the beginning diff --git a/docs/cmdline-opts/cookie-jar.d b/docs/cmdline-opts/cookie-jar.d index da79777eb6d587..f11991f4e946c6 100644 --- a/docs/cmdline-opts/cookie-jar.d +++ b/docs/cmdline-opts/cookie-jar.d @@ -3,6 +3,7 @@ Long: cookie-jar Arg: Protocols: HTTP Help: Write cookies to after operation +Category: http --- Specify to which file you want curl to write all cookies after a completed operation. Curl writes all cookies from its in-memory cookie storage to the diff --git a/docs/cmdline-opts/cookie.d b/docs/cmdline-opts/cookie.d index 1e9906977e31d4..5d2c9d7838609f 100644 --- a/docs/cmdline-opts/cookie.d +++ b/docs/cmdline-opts/cookie.d @@ -3,6 +3,7 @@ Long: cookie Arg: Protocols: HTTP Help: Send cookies from string/file +Category: http --- Pass the data to the HTTP server in the Cookie header. It is supposedly the data previously received from the server in a "Set-Cookie:" line. The @@ -13,7 +14,7 @@ to read previously stored cookie from. 
This option also activates the cookie engine which will make curl record incoming cookies, which may be handy if you're using this in combination with the --location option or do multiple URL transfers on the same invoke. If the file name is exactly a minus ("-"), curl -will instead the contents from stdin. +will instead read the contents from stdin. The file format of the file to read cookies from should be plain HTTP headers (Set-Cookie style) or the Netscape/Mozilla cookie file format. diff --git a/docs/cmdline-opts/create-dirs.d b/docs/cmdline-opts/create-dirs.d index 49e22e75a8c8d5..48a8fd466f69f1 100644 --- a/docs/cmdline-opts/create-dirs.d +++ b/docs/cmdline-opts/create-dirs.d @@ -1,9 +1,12 @@ Long: create-dirs Help: Create necessary local directory hierarchy +Category: curl --- When used in conjunction with the --output option, curl will create the necessary local directory hierarchy as needed. This option creates the dirs mentioned with the --output option, nothing else. If the --output file name uses no dir or if the dirs it mentions already exist, no dir will be created. +Created dirs are made with mode 0750 on unix style file systems. + To create remote directories when using FTP or SFTP, try --ftp-create-dirs. diff --git a/docs/cmdline-opts/crlf.d b/docs/cmdline-opts/crlf.d index f6694b654d095c..50c4bef81de3dc 100644 --- a/docs/cmdline-opts/crlf.d +++ b/docs/cmdline-opts/crlf.d @@ -1,6 +1,7 @@ Long: crlf Help: Convert LF to CRLF in upload Protocols: FTP SMTP +Category: ftp smtp --- Convert LF to CRLF in upload. Useful for MVS (OS/390). diff --git a/docs/cmdline-opts/crlfile.d b/docs/cmdline-opts/crlfile.d index 0fcc63c85d21c6..360e668d1e9da7 100644 --- a/docs/cmdline-opts/crlfile.d +++ b/docs/cmdline-opts/crlfile.d @@ -3,6 +3,7 @@ Arg: Protocols: TLS Help: Get a CRL list in PEM format from the given file Added: 7.19.7 +Category: tls --- Provide a file using PEM format with a Certificate Revocation List that may specify peer certificates that are to be considered revoked. diff --git a/docs/cmdline-opts/curves.d b/docs/cmdline-opts/curves.d new file mode 100644 index 00000000000000..47870a2789dc60 --- /dev/null +++ b/docs/cmdline-opts/curves.d @@ -0,0 +1,18 @@ +Long: curves +Arg: +Help: (EC) TLS key exchange algorithm(s) to request +Protocols: TLS +Added: 7.73.0 +Category: tls +--- +Tells curl to request specific curves to use during SSL session establishment +according to RFC 8422, 5.1. Multiple algorithms can be provided by separating +them with ":" (e.g. "X25519:P-521"). The parameter is available identically +in the "openssl s_client/s_server" utilities. + +--curves allows a OpenSSL powered curl to make SSL-connections with exactly +the (EC) curve requested by the client, avoiding intransparent client/server +negotiations. + +If this option is set, the default curves list built into openssl will be +ignored. diff --git a/docs/cmdline-opts/data-ascii.d b/docs/cmdline-opts/data-ascii.d index bda4abc3d1dc74..3522f6345c811d 100644 --- a/docs/cmdline-opts/data-ascii.d +++ b/docs/cmdline-opts/data-ascii.d @@ -2,5 +2,6 @@ Long: data-ascii Arg: Help: HTTP POST ASCII data Protocols: HTTP +Category: http post upload --- This is just an alias for --data. 
diff --git a/docs/cmdline-opts/data-binary.d b/docs/cmdline-opts/data-binary.d index 3f6ff2dbd0a210..c40785ece121cc 100644 --- a/docs/cmdline-opts/data-binary.d +++ b/docs/cmdline-opts/data-binary.d @@ -2,6 +2,7 @@ Long: data-binary Arg: Help: HTTP POST binary data Protocols: HTTP +Category: http post upload --- This posts data exactly as specified with no extra processing whatsoever. diff --git a/docs/cmdline-opts/data-raw.d b/docs/cmdline-opts/data-raw.d index 7669b4abfa7d80..4db83aefa02671 100644 --- a/docs/cmdline-opts/data-raw.d +++ b/docs/cmdline-opts/data-raw.d @@ -4,6 +4,7 @@ Protocols: HTTP Help: HTTP POST data, '@' allowed Added: 7.43.0 See-also: data +Category: http post upload --- This posts data similarly to --data but without the special interpretation of the @ character. diff --git a/docs/cmdline-opts/data-urlencode.d b/docs/cmdline-opts/data-urlencode.d index 9873f3356e4fcc..2f2a3645bc408f 100644 --- a/docs/cmdline-opts/data-urlencode.d +++ b/docs/cmdline-opts/data-urlencode.d @@ -4,6 +4,7 @@ Help: HTTP POST data url encoded Protocols: HTTP See-also: data data-raw Added: 7.18.0 +Category: http post upload --- This posts data, similar to the other --data options with the exception that this performs URL-encoding. diff --git a/docs/cmdline-opts/data.d b/docs/cmdline-opts/data.d index 7d499665e08a4c..53ba86b36d5b38 100644 --- a/docs/cmdline-opts/data.d +++ b/docs/cmdline-opts/data.d @@ -2,9 +2,10 @@ Long: data Short: d Arg: Help: HTTP POST data -Protocols: HTTP +Protocols: HTTP MQTT See-also: data-binary data-urlencode data-raw Mutexed: form head upload-file +Category: important http post upload --- Sends the specified data in a POST request to the HTTP server, in the same way that a browser does when a user has filled in an HTML form and presses the @@ -22,9 +23,8 @@ data pieces specified will be merged together with a separating chunk that looks like \&'name=daniel&skill=lousy'. If you start the data with the letter @, the rest should be a file name to -read the data from, or - if you want curl to read the data from -stdin. Multiple files can also be specified. Posting data from a file named -'foobar' would thus be done with --data @foobar. When --data is told to read -from a file like that, carriage returns and newlines will be stripped out. If -you don't want the @ character to have a special interpretation use --data-raw -instead. +read the data from, or - if you want curl to read the data from stdin. Posting +data from a file named \&'foobar' would thus be done with --data @foobar. When +--data is told to read from a file like that, carriage returns and newlines +will be stripped out. If you don't want the @ character to have a special +interpretation use --data-raw instead. diff --git a/docs/cmdline-opts/delegation.d b/docs/cmdline-opts/delegation.d index 138d82333e06e9..fa2795f9647532 100644 --- a/docs/cmdline-opts/delegation.d +++ b/docs/cmdline-opts/delegation.d @@ -2,6 +2,7 @@ Long: delegation Arg: Help: GSS-API delegation permission Protocols: GSS/kerberos +Category: auth --- Set LEVEL to tell the server what it is allowed to delegate when it comes to user credentials. diff --git a/docs/cmdline-opts/digest.d b/docs/cmdline-opts/digest.d index 5cdd9258a01eb1..da8c01e26523c7 100644 --- a/docs/cmdline-opts/digest.d +++ b/docs/cmdline-opts/digest.d @@ -3,6 +3,7 @@ Help: Use HTTP Digest Authentication Protocols: HTTP Mutexed: basic ntlm negotiate See-also: user proxy-digest anyauth +Category: proxy auth http --- Enables HTTP Digest authentication. 
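As a sketch of the --data file-reading behaviour described above (the file name foobar comes from the option text, the URL is illustrative), reading POST data from a file and from stdin respectively:

 curl --data @foobar https://example.com/

 curl --data @- https://example.com/ < foobar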
This is an authentication scheme that prevents the password from being sent over the wire in clear text. Use this in diff --git a/docs/cmdline-opts/disable-eprt.d b/docs/cmdline-opts/disable-eprt.d index a1e53c0bd1bd20..ffcf684e4a3f29 100644 --- a/docs/cmdline-opts/disable-eprt.d +++ b/docs/cmdline-opts/disable-eprt.d @@ -1,6 +1,7 @@ Long: disable-eprt Help: Inhibit using EPRT or LPRT Protocols: FTP +Category: ftp --- Tell curl to disable the use of the EPRT and LPRT commands when doing active FTP transfers. Curl will normally always first attempt to use EPRT, then LPRT diff --git a/docs/cmdline-opts/disable-epsv.d b/docs/cmdline-opts/disable-epsv.d index 6d2cb708984fc2..df1de830414218 100644 --- a/docs/cmdline-opts/disable-epsv.d +++ b/docs/cmdline-opts/disable-epsv.d @@ -1,6 +1,7 @@ Long: disable-epsv Help: Inhibit using EPSV Protocols: FTP +Category: ftp --- (FTP) Tell curl to disable the use of the EPSV command when doing passive FTP transfers. Curl will normally always first attempt to use EPSV before PASV, diff --git a/docs/cmdline-opts/disable.d b/docs/cmdline-opts/disable.d index 20b27b4c52437e..688fc0c54d2672 100644 --- a/docs/cmdline-opts/disable.d +++ b/docs/cmdline-opts/disable.d @@ -1,6 +1,7 @@ Long: disable Short: q Help: Disable .curlrc +Category: curl --- If used as the first parameter on the command line, the \fIcurlrc\fP config file will not be read and used. See the --config for details on the default diff --git a/docs/cmdline-opts/disallow-username-in-url.d b/docs/cmdline-opts/disallow-username-in-url.d index a7f46ea15c64bd..e124f675a96ea2 100644 --- a/docs/cmdline-opts/disallow-username-in-url.d +++ b/docs/cmdline-opts/disallow-username-in-url.d @@ -3,5 +3,6 @@ Help: Disallow username in url Protocols: HTTP Added: 7.61.0 See-also: proto +Category: curl http --- This tells curl to exit if passed a url containing a username. diff --git a/docs/cmdline-opts/dns-interface.d b/docs/cmdline-opts/dns-interface.d index 45e5af263e3e24..3cb818ae3e3dc2 100644 --- a/docs/cmdline-opts/dns-interface.d +++ b/docs/cmdline-opts/dns-interface.d @@ -5,6 +5,7 @@ Protocols: DNS See-also: dns-ipv4-addr dns-ipv6-addr Added: 7.33.0 Requires: c-ares +Category: dns --- Tell curl to send outgoing DNS requests through . This option is a counterpart to --interface (which does not affect DNS). The supplied string diff --git a/docs/cmdline-opts/dns-ipv4-addr.d b/docs/cmdline-opts/dns-ipv4-addr.d index 597b8588458300..5ebdda5cad0502 100644 --- a/docs/cmdline-opts/dns-ipv4-addr.d +++ b/docs/cmdline-opts/dns-ipv4-addr.d @@ -5,6 +5,7 @@ Protocols: DNS See-also: dns-interface dns-ipv6-addr Added: 7.33.0 Requires: c-ares +Category: dns --- Tell curl to bind to when making IPv4 DNS requests, so that the DNS requests originate from this address. The argument should be a diff --git a/docs/cmdline-opts/dns-ipv6-addr.d b/docs/cmdline-opts/dns-ipv6-addr.d index 581f0195370538..10e8076461d4a7 100644 --- a/docs/cmdline-opts/dns-ipv6-addr.d +++ b/docs/cmdline-opts/dns-ipv6-addr.d @@ -5,6 +5,7 @@ Protocols: DNS See-also: dns-interface dns-ipv4-addr Added: 7.33.0 Requires: c-ares +Category: dns --- Tell curl to bind to when making IPv6 DNS requests, so that the DNS requests originate from this address. 
The argument should be a diff --git a/docs/cmdline-opts/dns-servers.d b/docs/cmdline-opts/dns-servers.d index a98fd07d895910..8665426a83a8a8 100644 --- a/docs/cmdline-opts/dns-servers.d +++ b/docs/cmdline-opts/dns-servers.d @@ -3,6 +3,7 @@ Arg: Help: DNS server addrs to use Requires: c-ares Added: 7.33.0 +Category: dns --- Set the list of DNS servers to be used instead of the system default. The list of IP addresses should be separated with commas. Port numbers diff --git a/docs/cmdline-opts/doh-url.d b/docs/cmdline-opts/doh-url.d index 8fa42c1b2a11ff..7fce4460d9dcd5 100644 --- a/docs/cmdline-opts/doh-url.d +++ b/docs/cmdline-opts/doh-url.d @@ -2,6 +2,8 @@ Long: doh-url Arg: Help: Resolve host names over DOH Protocols: all +Added: 7.62.0 +Category: dns --- Specifies which DNS-over-HTTPS (DOH) server to use to resolve hostnames, instead of using the default name resolver mechanism. The URL must be HTTPS. diff --git a/docs/cmdline-opts/dump-header.d b/docs/cmdline-opts/dump-header.d index 05c10affd4761a..8449dfe8516dcc 100644 --- a/docs/cmdline-opts/dump-header.d +++ b/docs/cmdline-opts/dump-header.d @@ -4,6 +4,7 @@ Arg: Help: Write the received headers to Protocols: HTTP FTP See-also: output +Category: http ftp --- Write the received protocol headers to the specified file. @@ -12,6 +13,8 @@ site sends to you. Cookies from the headers could then be read in a second curl invocation by using the --cookie option! The --cookie-jar option is a better way to store cookies. +If no headers are received, the use of this option will create an empty file. + When used in FTP, the FTP server response lines are considered being "headers" and thus are saved there. diff --git a/docs/cmdline-opts/egd-file.d b/docs/cmdline-opts/egd-file.d index c22790f6ae7fc1..05ede9ad13844a 100644 --- a/docs/cmdline-opts/egd-file.d +++ b/docs/cmdline-opts/egd-file.d @@ -3,6 +3,7 @@ Arg: Help: EGD socket path for random data Protocols: TLS See-also: random-file +Category: tls --- Specify the path name to the Entropy Gathering Daemon socket. The socket is used to seed the random engine for SSL connections. diff --git a/docs/cmdline-opts/engine.d b/docs/cmdline-opts/engine.d index cde1a477357bae..9ed0cf1b0bf20c 100644 --- a/docs/cmdline-opts/engine.d +++ b/docs/cmdline-opts/engine.d @@ -2,6 +2,7 @@ Long: engine Arg: Help: Crypto engine to use Protocols: TLS +Category: tls --- Select the OpenSSL crypto engine to use for cipher operations. Use --engine list to print a list of build-time supported engines. Note that not all (or diff --git a/docs/cmdline-opts/etag-compare.d b/docs/cmdline-opts/etag-compare.d new file mode 100644 index 00000000000000..cbf48ac5e6ed6f --- /dev/null +++ b/docs/cmdline-opts/etag-compare.d @@ -0,0 +1,19 @@ +Long: etag-compare +Arg: +Help: Pass an ETag from a file as a custom header +Protocols: HTTP +Added: 7.68.0 +Category: http +--- +This option makes a conditional HTTP request for the specific +ETag read from the given file by sending a custom If-None-Match +header using the extracted ETag. + +For correct results, make sure that the specified file contains only a single +line with the desired ETag. An empty file is parsed as an empty ETag. + +Use the option --etag-save to first save the ETag from a response, and +then use this option to compare using the saved ETag in a subsequent request. + +\fBCOMPARISON\fP: There are 2 types of comparison of ETags: Weak and Strong. +This option expects, and uses, a strong comparison.
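The two new ETag options are meant to be used as a pair, as the text above suggests: save the ETag from one response, then make a conditional request with it later. A sketch, with an illustrative file name and URL:

 curl --etag-save etag.txt https://example.com/file

 curl --etag-compare etag.txt https://example.com/file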
diff --git a/docs/cmdline-opts/etag-save.d b/docs/cmdline-opts/etag-save.d new file mode 100644 index 00000000000000..82e6dfa84df180 --- /dev/null +++ b/docs/cmdline-opts/etag-save.d @@ -0,0 +1,17 @@ +Long: etag-save +Arg: +Help: Parse ETag from a request and save it to a file +Protocols: HTTP +Added: 7.68.0 +Category: http +--- +This option saves an HTTP ETag to the specified file. An ETag is +usually part of the headers returned by a request. When a server sends an +ETag, it is enveloped by double quotes. This option extracts the +ETag without the double quotes and saves it into the given file. + +A server can send a weak ETag which is prefixed by "W/". This identifier +is not considered, and only the relevant ETag between quotation marks is parsed. + +If an ETag wasn't sent by the server or it cannot be parsed, an empty +file is created. diff --git a/docs/cmdline-opts/expect100-timeout.d b/docs/cmdline-opts/expect100-timeout.d index c88f0b84fd8e1e..431a529c297148 100644 --- a/docs/cmdline-opts/expect100-timeout.d +++ b/docs/cmdline-opts/expect100-timeout.d @@ -4,6 +4,7 @@ Help: How long to wait for 100-continue Protocols: HTTP Added: 7.47.0 See-also: connect-timeout +Category: http --- Maximum time in seconds that you allow curl to wait for a 100-continue response when curl emits an Expects: 100-continue header in its request. By diff --git a/docs/cmdline-opts/fail-early.d b/docs/cmdline-opts/fail-early.d index 375d4c919544f1..3fbe238af205c7 100644 --- a/docs/cmdline-opts/fail-early.d +++ b/docs/cmdline-opts/fail-early.d @@ -1,6 +1,7 @@ Long: fail-early Help: Fail on first transfer error, do not continue Added: 7.52.0 +Category: curl --- Fail and exit on the first detected transfer error. diff --git a/docs/cmdline-opts/fail.d b/docs/cmdline-opts/fail.d index c46c571bfe970f..7fc76fb6a11518 100644 --- a/docs/cmdline-opts/fail.d +++ b/docs/cmdline-opts/fail.d @@ -2,6 +2,7 @@ Long: fail Short: f Protocols: HTTP Help: Fail silently (no output at all) on HTTP errors +Category: important http --- Fail silently (no output at all) on server errors. This is mostly done to better enable scripts etc to better deal with failed attempts. In normal cases diff --git a/docs/cmdline-opts/false-start.d b/docs/cmdline-opts/false-start.d index 65a8afb8f3e3cd..abeff0eb5cfdee 100644 --- a/docs/cmdline-opts/false-start.d +++ b/docs/cmdline-opts/false-start.d @@ -2,6 +2,7 @@ Long: false-start Help: Enable TLS False Start Protocols: TLS Added: 7.42.0 +Category: tls --- Tells curl to use false start during the TLS handshake. False start is a mode where a TLS client will start sending application data before verifying the diff --git a/docs/cmdline-opts/form-string.d b/docs/cmdline-opts/form-string.d index 49d0d44ef8456d..04d2578c559684 100644 --- a/docs/cmdline-opts/form-string.d +++ b/docs/cmdline-opts/form-string.d @@ -3,6 +3,7 @@ Help: Specify multipart MIME data Protocols: HTTP SMTP IMAP Arg: See-also: form +Category: http upload --- Similar to --form except that the value string for the named parameter is used literally. Leading \&'@' and \&'<' characters, and the \&';type=' string in diff --git a/docs/cmdline-opts/form.d b/docs/cmdline-opts/form.d index 0bbc3701f19cc4..3cd8dce756de49 100644 --- a/docs/cmdline-opts/form.d +++ b/docs/cmdline-opts/form.d @@ -4,6 +4,7 @@ Arg: Help: Specify multipart MIME data Protocols: HTTP SMTP IMAP Mutexed: data head upload-file +Category: http upload --- For HTTP protocol family, this lets curl emulate a filled-in form in which a user has pressed the submit button.
This causes curl to POST data using the @@ -33,11 +34,11 @@ form-field to which the file portrait.jpg will be the input: curl -F profile=@portrait.jpg https://example.com/upload.cgi -Example: send a your name and shoe size in two text fields to the server: +Example: send your name and shoe size in two text fields to the server: curl -F name=John -F shoesize=11 https://example.com/ -Example: send a your essay in a text field to the server. Send it as a plain +Example: send your essay in a text field to the server. Send it as a plain text field, but get the contents for it from a local file: curl -F "story= Help: Account data string Protocols: FTP Added: 7.13.0 +Category: ftp auth --- When an FTP server asks for "account data" after user name and password has been provided, this data is sent off using the ACCT command. diff --git a/docs/cmdline-opts/ftp-alternative-to-user.d b/docs/cmdline-opts/ftp-alternative-to-user.d index 8982ba8b854685..1714849063de17 100644 --- a/docs/cmdline-opts/ftp-alternative-to-user.d +++ b/docs/cmdline-opts/ftp-alternative-to-user.d @@ -3,6 +3,7 @@ Arg: Help: String to replace USER [name] Protocols: FTP Added: 7.15.5 +Category: ftp --- If authenticating with the USER and PASS commands fails, send this command. When connecting to Tumbleweed's Secure Transport server over FTPS using a diff --git a/docs/cmdline-opts/ftp-create-dirs.d b/docs/cmdline-opts/ftp-create-dirs.d index ede57100d1e0f5..5d9dfda46a3ebd 100644 --- a/docs/cmdline-opts/ftp-create-dirs.d +++ b/docs/cmdline-opts/ftp-create-dirs.d @@ -2,6 +2,7 @@ Long: ftp-create-dirs Protocols: FTP SFTP Help: Create the remote dirs if not present See-also: create-dirs +Category: ftp sftp curl --- When an FTP or SFTP URL/operation uses a path that doesn't currently exist on the server, the standard behavior of curl is to fail. Using this option, curl diff --git a/docs/cmdline-opts/ftp-method.d b/docs/cmdline-opts/ftp-method.d index 95aa522e829ea6..149340b82f5812 100644 --- a/docs/cmdline-opts/ftp-method.d +++ b/docs/cmdline-opts/ftp-method.d @@ -3,6 +3,7 @@ Arg: Help: Control CWD usage Protocols: FTP Added: 7.15.1 +Category: ftp --- Control what method curl should use to reach a file on an FTP(S) server. The method argument should be one of the following alternatives: diff --git a/docs/cmdline-opts/ftp-pasv.d b/docs/cmdline-opts/ftp-pasv.d index 44103e21a380c7..cbd548de322c30 100644 --- a/docs/cmdline-opts/ftp-pasv.d +++ b/docs/cmdline-opts/ftp-pasv.d @@ -3,6 +3,7 @@ Help: Use PASV/EPSV instead of PORT Protocols: FTP Added: 7.11.0 See-also: disable-epsv +Category: ftp --- Use passive mode for the data connection. Passive is the internal default behavior, but using this option can be used to override a previous --ftp-port diff --git a/docs/cmdline-opts/ftp-port.d b/docs/cmdline-opts/ftp-port.d index e4b14560820ea3..9bb1d43f0eeb7a 100644 --- a/docs/cmdline-opts/ftp-port.d +++ b/docs/cmdline-opts/ftp-port.d @@ -4,6 +4,7 @@ Help: Use PORT instead of PASV Short: P Protocols: FTP See-also: ftp-pasv disable-eprt +Category: ftp --- Reverses the default initiator/listener roles when connecting with FTP. This option makes curl use active mode. 
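As an illustration of the --ftp-create-dirs behaviour described earlier (host and path are made up), uploading to a remote directory that does not exist yet:

 curl --ftp-create-dirs -T file.txt ftp://ftp.example.com/new/dir/file.txt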
curl then tells the server to connect back diff --git a/docs/cmdline-opts/ftp-pret.d b/docs/cmdline-opts/ftp-pret.d index dac4c35319b051..e9c7d251a6ce66 100644 --- a/docs/cmdline-opts/ftp-pret.d +++ b/docs/cmdline-opts/ftp-pret.d @@ -2,6 +2,7 @@ Long: ftp-pret Help: Send PRET before PASV Protocols: FTP Added: 7.20.0 +Category: ftp --- Tell curl to send a PRET command before PASV (and EPSV). Certain FTP servers, mainly drftpd, require this non-standard command for directory listings as diff --git a/docs/cmdline-opts/ftp-skip-pasv-ip.d b/docs/cmdline-opts/ftp-skip-pasv-ip.d index da6ab11fc72109..d6fd4589b1e968 100644 --- a/docs/cmdline-opts/ftp-skip-pasv-ip.d +++ b/docs/cmdline-opts/ftp-skip-pasv-ip.d @@ -3,6 +3,7 @@ Help: Skip the IP address for PASV Protocols: FTP Added: 7.14.2 See-also: ftp-pasv +Category: ftp --- Tell curl to not use the IP address the server suggests in its response to curl's PASV command when curl connects the data connection. Instead curl diff --git a/docs/cmdline-opts/ftp-ssl-ccc-mode.d b/docs/cmdline-opts/ftp-ssl-ccc-mode.d index be10294985ad02..6289e544a87e84 100644 --- a/docs/cmdline-opts/ftp-ssl-ccc-mode.d +++ b/docs/cmdline-opts/ftp-ssl-ccc-mode.d @@ -4,6 +4,7 @@ Help: Set CCC mode Protocols: FTP Added: 7.16.2 See-also: ftp-ssl-ccc +Category: ftp tls --- Sets the CCC mode. The passive mode will not initiate the shutdown, but instead wait for the server to do it, and will not reply to the shutdown from diff --git a/docs/cmdline-opts/ftp-ssl-ccc.d b/docs/cmdline-opts/ftp-ssl-ccc.d index c6edc5b395ef6a..33cab4302a51d1 100644 --- a/docs/cmdline-opts/ftp-ssl-ccc.d +++ b/docs/cmdline-opts/ftp-ssl-ccc.d @@ -3,6 +3,7 @@ Help: Send CCC after authenticating Protocols: FTP See-also: ssl ftp-ssl-ccc-mode Added: 7.16.1 +Category: ftp tls --- Use CCC (Clear Command Channel) Shuts down the SSL/TLS layer after authenticating. The rest of the control channel communication will be diff --git a/docs/cmdline-opts/ftp-ssl-control.d b/docs/cmdline-opts/ftp-ssl-control.d index 87a822531dd40b..5191353ddc6410 100644 --- a/docs/cmdline-opts/ftp-ssl-control.d +++ b/docs/cmdline-opts/ftp-ssl-control.d @@ -2,6 +2,7 @@ Long: ftp-ssl-control Help: Require SSL/TLS for FTP login, clear for transfer Protocols: FTP Added: 7.16.0 +Category: ftp tls --- Require SSL/TLS for the FTP login, clear for transfer. Allows secure authentication, but non-encrypted data transfers for efficiency. Fails the diff --git a/docs/cmdline-opts/gen.pl b/docs/cmdline-opts/gen.pl index a921298a643e71..3fec1646db6a56 100755 --- a/docs/cmdline-opts/gen.pl +++ b/docs/cmdline-opts/gen.pl @@ -1,10 +1,31 @@ #!/usr/bin/env perl +#*************************************************************************** +# _ _ ____ _ +# Project ___| | | | _ \| | +# / __| | | | |_) | | +# | (__| |_| | _ <| |___ +# \___|\___/|_| \_\_____| +# +# Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at https://curl.haxx.se/docs/copyright.html. +# +# You may opt to use, copy, modify, merge, publish, distribute and/or sell +# copies of the Software, and permit persons to whom the Software is +# furnished to do so, under the terms of the COPYING file. +# +# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY +# KIND, either express or implied. 
+# +########################################################################### =begin comment This script generates the manpage. -Example: gen.pl mainpage > curl.1 +Example: gen.pl [files] > curl.1 Dev notes: @@ -16,18 +37,13 @@ =end comment =cut -my $some_dir=$ARGV[1] || "."; - -opendir(my $dh, $some_dir) || die "Can't opendir $some_dir: $!"; -my @s = grep { /\.d$/ && -f "$some_dir/$_" } readdir($dh); -closedir $dh; - my %optshort; my %optlong; my %helplong; my %arglong; my %redirlong; my %protolong; +my %catlong; # get the long name version, return the man page string sub manpageify { @@ -101,7 +117,7 @@ sub added { sub single { my ($f, $standalone)=@_; - open(F, "<:crlf", "$some_dir/$f") || + open(F, "<:crlf", "$f") || return 1; my $short; my $long; @@ -111,6 +127,7 @@ sub single { my $arg; my $mutexed; my $requires; + my $category; my $seealso; my $magic; # cmdline special option while() { @@ -144,6 +161,9 @@ sub single { elsif(/^Requires: *(.*)/i) { $requires=$1; } + elsif(/^Category: *(.*)/i) { + $category=$1; + } elsif(/^Help: *(.*)/i) { ; } @@ -151,6 +171,9 @@ sub single { if(!$long) { print STDERR "WARN: no 'Long:' in $f\n"; } + if(!$category) { + print STDERR "WARN: no 'Category:' in $f\n"; + } last; } else { @@ -201,12 +224,24 @@ sub single { if($seealso) { my @m=split(/ /, $seealso); my $mstr; + my $and = 0; + my $num = scalar(@m); + if($num > 2) { + # use commas up to this point + $and = $num - 1; + } + my $i = 0; for my $k (@m) { if(!$helplong{$k}) { print STDERR "WARN: $f see-alsos a non-existing option: $k\n"; } my $l = manpageify($k); - $mstr .= sprintf "%s$l", $mstr?" and ":""; + my $sep = " and"; + if($and && ($i < $and)) { + $sep = ","; + } + $mstr .= sprintf "%s$l", $mstr?"$sep ":""; + $i++; } push @foot, seealso($standalone, $mstr); } @@ -241,12 +276,13 @@ sub single { sub getshortlong { my ($f)=@_; - open(F, "<:crlf", "$some_dir/$f"); + open(F, "<:crlf", "$f"); my $short; my $long; my $help; my $arg; my $protocols; + my $category; while() { if(/^Short: (.)/i) { $short=$1; @@ -263,6 +299,9 @@ sub getshortlong { elsif(/^Protocols: (.*)/i) { $protocols=$1; } + elsif(/^Category: (.*)/i) { + $category=$1; + } elsif(/^---/) { last; } @@ -276,18 +315,20 @@ sub getshortlong { $helplong{$long}=$help; $arglong{$long}=$arg; $protolong{$long}=$protocols; + $catlong{$long}=$category; } } sub indexoptions { - foreach my $f (@s) { - getshortlong($f); - } + my (@files) = @_; + foreach my $f (@files) { + getshortlong($f); + } } sub header { my ($f)=@_; - open(F, "<:crlf", "$some_dir/$f"); + open(F, "<:crlf", "$f"); my @d; while() { push @d, $_; @@ -300,6 +341,8 @@ sub listhelp { foreach my $f (sort keys %helplong) { my $long = $f; my $short = $optlong{$long}; + my @categories = split ' ', $catlong{$long}; + my $bitmask; my $opt; if(defined($short) && $long) { @@ -308,7 +351,13 @@ sub listhelp { elsif($long && !$short) { $opt = " --$long"; } - + for my $i (0 .. $#categories) { + $bitmask .= 'CURLHELP_' . 
uc $categories[$i]; + # If not last element, append | + if($i < $#categories) { + $bitmask .= ' | '; + } + } my $arg = $arglong{$long}; if($arg) { $opt .= " $arg"; @@ -316,7 +365,7 @@ sub listhelp { my $desc = $helplong{$f}; $desc =~ s/\"/\\\"/g; # escape double quotes - my $line = sprintf " {\"%s\",\n \"%s\"},\n", $opt, $desc; + my $line = sprintf " {\"%s\",\n \"%s\",\n %s},\n", $opt, $desc, $bitmask; if(length($opt) + length($desc) > 78) { print STDERR "WARN: the --$long line is too long\n"; @@ -325,13 +374,35 @@ sub listhelp { } } +sub listcats { + my %allcats; + foreach my $f (sort keys %helplong) { + my @categories = split ' ', $catlong{$f}; + foreach (@categories) { + $allcats{$_} = undef; + } + } + my @categories; + foreach my $key (keys %allcats) { + push @categories, $key; + } + @categories = sort @categories; + unshift @categories, 'hidden'; + for my $i (0..$#categories) { + print '#define ' . 'CURLHELP_' . uc($categories[$i]) . ' ' . "1u << " . $i . "u\n"; + } +} + sub mainpage { + my (@files) = @_; # show the page header header("page-header"); # output docs for all options - foreach my $f (sort @s) { - single($f, 0); + foreach my $f (sort @files) { + if(single($f, 0)) { + print STDERR "Can't read $f?\n"; + } } header("page-footer"); @@ -358,33 +429,37 @@ sub showprotocols { } sub getargs { - my $f; - do { - $f = shift @ARGV; - if($f eq "mainpage") { - mainpage(); - return; - } - elsif($f eq "listhelp") { - listhelp(); - return; - } - elsif($f eq "single") { - showonly(shift @ARGV); - return; - } - elsif($f eq "protos") { - showprotocols(); - return; - } - } while($f); + my ($f, @s) = @_; + if($f eq "mainpage") { + mainpage(@s); + return; + } + elsif($f eq "listhelp") { + listhelp(); + return; + } + elsif($f eq "single") { + showonly($s[0]); + return; + } + elsif($f eq "protos") { + showprotocols(); + return; + } + elsif($f eq "listcats") { + listcats(); + return; + } - print "Usage: gen.pl [srcdir]\n"; + print "Usage: gen.pl [files]\n"; } #------------------------------------------------------------------------ +my $cmd = shift @ARGV; +my @files = @ARGV; # the rest are the files + # learn all existing options -indexoptions(); +indexoptions(@files); -getargs(); +getargs($cmd, @files); diff --git a/docs/cmdline-opts/get.d b/docs/cmdline-opts/get.d index be7cb25f0cc6dd..d529bb4e0bb665 100644 --- a/docs/cmdline-opts/get.d +++ b/docs/cmdline-opts/get.d @@ -1,6 +1,7 @@ Long: get Short: G Help: Put the post data in the URL and use GET +Category: http upload --- When used, this option will make all data specified with --data, --data-binary or --data-urlencode to be used in an HTTP GET request instead of the POST diff --git a/docs/cmdline-opts/globoff.d b/docs/cmdline-opts/globoff.d index fff6516b6d0ec8..4b5f30de916a8d 100644 --- a/docs/cmdline-opts/globoff.d +++ b/docs/cmdline-opts/globoff.d @@ -1,6 +1,7 @@ Long: globoff Short: g Help: Disable URL sequences and ranges using {} and [] +Category: curl --- This option switches off the "URL globbing parser". 
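With the gen.pl rewrite above, the script no longer scans a source directory on its own but is given the .d files as arguments; a sketch of typical invocations, assuming it is run from the docs/cmdline-opts directory:

 perl gen.pl mainpage *.d > curl.1

 perl gen.pl listcats *.d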
When you set this option, you can specify URLs that contain the letters {}[] without having them being diff --git a/docs/cmdline-opts/happy-eyeballs-timeout-ms.d b/docs/cmdline-opts/happy-eyeballs-timeout-ms.d index ec9a8c228f65c7..ceff2a7df6498e 100644 --- a/docs/cmdline-opts/happy-eyeballs-timeout-ms.d +++ b/docs/cmdline-opts/happy-eyeballs-timeout-ms.d @@ -1,7 +1,8 @@ Long: happy-eyeballs-timeout-ms Arg: -Help: How long to wait in milliseconds for IPv6 before trying IPv4 +Help: Time for IPv6 before trying IPv4 Added: 7.59.0 +Category: connection --- Happy eyeballs is an algorithm that attempts to connect to both IPv4 and IPv6 addresses for dual-stack hosts, preferring IPv6 first for the number of diff --git a/docs/cmdline-opts/haproxy-protocol.d b/docs/cmdline-opts/haproxy-protocol.d index cc41c9c447276f..835820465a551d 100644 --- a/docs/cmdline-opts/haproxy-protocol.d +++ b/docs/cmdline-opts/haproxy-protocol.d @@ -2,6 +2,7 @@ Long: haproxy-protocol Help: Send HAProxy PROXY protocol v1 header Protocols: HTTP Added: 7.60.0 +Category: http proxy --- Send a HAProxy PROXY protocol v1 header at the beginning of the connection. This is used by some load balancers and reverse proxies to indicate the client's diff --git a/docs/cmdline-opts/head.d b/docs/cmdline-opts/head.d index 350a100f6558bc..6fe468511e17f9 100644 --- a/docs/cmdline-opts/head.d +++ b/docs/cmdline-opts/head.d @@ -2,6 +2,7 @@ Long: head Short: I Help: Show document info only Protocols: HTTP FTP FILE +Category: http ftp file --- Fetch the headers only! HTTP-servers feature the command HEAD which this uses to get nothing but the header of a document. When used on an FTP or FILE file, diff --git a/docs/cmdline-opts/header.d b/docs/cmdline-opts/header.d index d8292ed7754175..e84168996c7f6c 100644 --- a/docs/cmdline-opts/header.d +++ b/docs/cmdline-opts/header.d @@ -3,6 +3,7 @@ Short: H Arg:
Help: Pass custom header(s) to server Protocols: HTTP +Category: http --- Extra header to include in the request when sending HTTP to a server. You may specify any number of extra headers. Note that if you should add a custom diff --git a/docs/cmdline-opts/help.d b/docs/cmdline-opts/help.d index 64aa696d476fff..bf80b781891cc4 100644 --- a/docs/cmdline-opts/help.d +++ b/docs/cmdline-opts/help.d @@ -1,6 +1,12 @@ Long: help +Arg: Short: h -Help: This help text +Help: Get help for commands +Category: important curl --- -Usage help. This lists all current command line options with a short -description. +Usage help. This lists all commands of the . +If no arg was provided, curl will display the most important +command line arguments and the list of categories. +If the argument "all" was provided, curl will display all options available. +If the argument "category" was provided, curl will display all categories and +their meanings. diff --git a/docs/cmdline-opts/hostpubmd5.d b/docs/cmdline-opts/hostpubmd5.d index a851158031281f..c926ed8b5051f3 100644 --- a/docs/cmdline-opts/hostpubmd5.d +++ b/docs/cmdline-opts/hostpubmd5.d @@ -3,6 +3,7 @@ Arg: Help: Acceptable MD5 hash of the host public key Protocols: SFTP SCP Added: 7.17.1 +Category: sftp scp --- Pass a string containing 32 hexadecimal digits. The string should be the 128 bit MD5 checksum of the remote host's public key, curl will refuse diff --git a/docs/cmdline-opts/http0.9.d b/docs/cmdline-opts/http0.9.d index 33fe72d1881a3b..954c22a17ad567 100644 --- a/docs/cmdline-opts/http0.9.d +++ b/docs/cmdline-opts/http0.9.d @@ -3,6 +3,7 @@ Tags: Versions Protocols: HTTP Added: Help: Allow HTTP 0.9 responses +Category: http --- Tells curl to be fine with HTTP version 0.9 response. @@ -10,5 +11,4 @@ HTTP/0.9 is a completely headerless response and therefore you can also connect with this to non-HTTP servers and still get a response since curl will simply transparently downgrade - if allowed. -A future curl version will deny continuing if the response isn't at least -HTTP/1.0 unless this option is used. +Since curl 7.66.0, HTTP/0.9 is disabled by default. diff --git a/docs/cmdline-opts/http1.0.d b/docs/cmdline-opts/http1.0.d index d9bbd76f0fa128..a4059dbf0f341a 100644 --- a/docs/cmdline-opts/http1.0.d +++ b/docs/cmdline-opts/http1.0.d @@ -5,6 +5,7 @@ Protocols: HTTP Added: Mutexed: http1.1 http2 Help: Use HTTP 1.0 +Category: http --- Tells curl to use HTTP version 1.0 instead of using its internally preferred HTTP version. diff --git a/docs/cmdline-opts/http1.1.d b/docs/cmdline-opts/http1.1.d index f1e6b5c3bc31b7..a71a40bab2f4ad 100644 --- a/docs/cmdline-opts/http1.1.d +++ b/docs/cmdline-opts/http1.1.d @@ -4,5 +4,6 @@ Protocols: HTTP Added: 7.33.0 Mutexed: http1.0 http2 Help: Use HTTP 1.1 +Category: http --- Tells curl to use HTTP version 1.1. diff --git a/docs/cmdline-opts/http2-prior-knowledge.d b/docs/cmdline-opts/http2-prior-knowledge.d index f793f775dda685..ea2906edb465c3 100644 --- a/docs/cmdline-opts/http2-prior-knowledge.d +++ b/docs/cmdline-opts/http2-prior-knowledge.d @@ -5,6 +5,7 @@ Added: 7.49.0 Mutexed: http1.1 http1.0 http2 Requires: HTTP/2 Help: Use HTTP 2 without HTTP/1.1 Upgrade +Category: http --- Tells curl to issue its non-TLS HTTP requests using HTTP/2 without HTTP/1.1 Upgrade. 
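The reworked --help above takes an optional argument; per its description, the following forms are expected (the plain form prints the most important options, "all" prints everything, and "category" lists the available categories):

 curl --help

 curl --help all

 curl --help category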
It requires prior knowledge that the server supports HTTP/2 straight diff --git a/docs/cmdline-opts/http2.d b/docs/cmdline-opts/http2.d index 04cff00a4f7dbf..197515ee8cbc76 100644 --- a/docs/cmdline-opts/http2.d +++ b/docs/cmdline-opts/http2.d @@ -6,5 +6,7 @@ Mutexed: http1.1 http1.0 http2-prior-knowledge Requires: HTTP/2 See-also: no-alpn Help: Use HTTP 2 +See-also: http1.1 http3 +Category: http --- Tells curl to use HTTP version 2. diff --git a/docs/cmdline-opts/http3.d b/docs/cmdline-opts/http3.d new file mode 100644 index 00000000000000..478b662aa90e03 --- /dev/null +++ b/docs/cmdline-opts/http3.d @@ -0,0 +1,20 @@ +Long: http3 +Tags: Versions +Protocols: HTTP +Added: 7.66.0 +Mutexed: http1.1 http1.0 http2 http2-prior-knowledge +Requires: HTTP/3 +Help: Use HTTP v3 +See-also: http1.1 http2 +Category: http +--- + +WARNING: this option is experimental. Do not use in production. + +Tells curl to use HTTP version 3 directly to the host and port number used in +the URL. A normal HTTP/3 transaction will be done to a host and then get +redirected via Alt-SVc, but this option allows a user to circumvent that when +you know that the target speaks HTTP/3 on the given host and port. + +This option will make curl fail if a QUIC connection cannot be established, it +cannot fall back to a lower HTTP version on its own. diff --git a/docs/cmdline-opts/ignore-content-length.d b/docs/cmdline-opts/ignore-content-length.d index 53524f5184151c..82ac5da0918d15 100644 --- a/docs/cmdline-opts/ignore-content-length.d +++ b/docs/cmdline-opts/ignore-content-length.d @@ -1,6 +1,7 @@ Long: ignore-content-length Help: Ignore the size of the remote resource Protocols: FTP HTTP +Category: http ftp --- For HTTP, Ignore the Content-Length header. This is particularly useful for servers running Apache 1.x, which will report incorrect Content-Length for diff --git a/docs/cmdline-opts/include.d b/docs/cmdline-opts/include.d index 9d282dd162f29a..250d4accfd0c21 100644 --- a/docs/cmdline-opts/include.d +++ b/docs/cmdline-opts/include.d @@ -2,6 +2,7 @@ Long: include Short: i Help: Include protocol response headers in the output See-also: verbose +Category: important verbose --- Include the HTTP response headers in the output. The HTTP response headers can include things like server name, cookies, date of the document, HTTP version diff --git a/docs/cmdline-opts/insecure.d b/docs/cmdline-opts/insecure.d index 49b0a432285884..35f4a2e0f5ba3b 100644 --- a/docs/cmdline-opts/insecure.d +++ b/docs/cmdline-opts/insecure.d @@ -3,6 +3,7 @@ Short: k Help: Allow insecure server connections when using SSL Protocols: TLS See-also: proxy-insecure cacert +Category: tls --- By default, every SSL connection curl makes is verified to be secure. This diff --git a/docs/cmdline-opts/interface.d b/docs/cmdline-opts/interface.d index 65827fb8be1fb3..196e7efa34f748 100644 --- a/docs/cmdline-opts/interface.d +++ b/docs/cmdline-opts/interface.d @@ -2,6 +2,7 @@ Long: interface Arg: Help: Use network INTERFACE (or address) See-also: dns-interface +Category: connection --- Perform an operation using a specified interface. You can enter interface diff --git a/docs/cmdline-opts/ipv4.d b/docs/cmdline-opts/ipv4.d index 9c40c8c3eebf24..a6691376a6092c 100644 --- a/docs/cmdline-opts/ipv4.d +++ b/docs/cmdline-opts/ipv4.d @@ -7,6 +7,7 @@ Mutexed: ipv6 Requires: See-also: http1.1 http2 Help: Resolve names to IPv4 addresses +Category: connection dns --- This option tells curl to resolve names to IPv4 addresses only, and not for example try IPv6. 
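A minimal sketch of the experimental --http3 option documented above (the host is illustrative and the build must have HTTP/3 support compiled in):

 curl --http3 https://example.com/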
diff --git a/docs/cmdline-opts/ipv6.d b/docs/cmdline-opts/ipv6.d index 6eef6dd03b4b7a..ce0415caa91566 100644 --- a/docs/cmdline-opts/ipv6.d +++ b/docs/cmdline-opts/ipv6.d @@ -7,6 +7,7 @@ Mutexed: ipv4 Requires: See-also: http1.1 http2 Help: Resolve names to IPv6 addresses +Category: connection dns --- This option tells curl to resolve names to IPv6 addresses only, and not for example try IPv4. diff --git a/docs/cmdline-opts/junk-session-cookies.d b/docs/cmdline-opts/junk-session-cookies.d index 40ccd9c2df03bf..993b77fa820aef 100644 --- a/docs/cmdline-opts/junk-session-cookies.d +++ b/docs/cmdline-opts/junk-session-cookies.d @@ -3,6 +3,7 @@ Short: j Help: Ignore session cookies read from file Protocols: HTTP See-also: cookie cookie-jar +Category: http --- When curl is told to read cookies from a given file, this option will make it discard all "session cookies". This will basically have the same effect as if diff --git a/docs/cmdline-opts/keepalive-time.d b/docs/cmdline-opts/keepalive-time.d index c816e13ff0f6c3..41261535c835a0 100644 --- a/docs/cmdline-opts/keepalive-time.d +++ b/docs/cmdline-opts/keepalive-time.d @@ -2,6 +2,7 @@ Long: keepalive-time Arg: Help: Interval time for keepalive probes Added: 7.18.0 +Category: connection --- This option sets the time a connection needs to remain idle before sending keepalive probes and the time between individual keepalive probes. It is diff --git a/docs/cmdline-opts/key-type.d b/docs/cmdline-opts/key-type.d index bf39bcd3572aa4..50a068676ad77c 100644 --- a/docs/cmdline-opts/key-type.d +++ b/docs/cmdline-opts/key-type.d @@ -2,6 +2,7 @@ Long: key-type Arg: Help: Private key file type (DER/PEM/ENG) Protocols: TLS +Category: tls --- Private key file type. Specify which type your --key provided private key is. DER, PEM, and ENG are supported. If not specified, PEM is assumed. diff --git a/docs/cmdline-opts/key.d b/docs/cmdline-opts/key.d index 4877b42386f3a3..a762e6fafe9700 100644 --- a/docs/cmdline-opts/key.d +++ b/docs/cmdline-opts/key.d @@ -2,10 +2,11 @@ Long: key Arg: Protocols: TLS SSH Help: Private key file name +Category: tls ssh --- Private key file name. Allows you to provide your private key in this separate file. For SSH, if not specified, curl tries the following candidates in order: -'~/.ssh/id_rsa', '~/.ssh/id_dsa', './id_rsa', './id_dsa'. +\&'~/.ssh/id_rsa', '~/.ssh/id_dsa', './id_rsa', './id_dsa'. If curl is built against OpenSSL library, and the engine pkcs11 is available, then a PKCS#11 URI (RFC 7512) can be used to specify a private key located in a diff --git a/docs/cmdline-opts/krb.d b/docs/cmdline-opts/krb.d index 19547af0803bf9..7759cfb04be5d0 100644 --- a/docs/cmdline-opts/krb.d +++ b/docs/cmdline-opts/krb.d @@ -3,6 +3,7 @@ Arg: Help: Enable Kerberos with security Protocols: FTP Requires: Kerberos +Category: ftp --- Enable Kerberos authentication and use. The level must be entered and should be one of 'clear', 'safe', 'confidential', or 'private'. 
Should you use a diff --git a/docs/cmdline-opts/libcurl.d b/docs/cmdline-opts/libcurl.d index ef132fe745b45d..eb9850c680cc43 100644 --- a/docs/cmdline-opts/libcurl.d +++ b/docs/cmdline-opts/libcurl.d @@ -2,6 +2,7 @@ Long: libcurl Arg: Help: Dump libcurl equivalent code of this command line Added: 7.16.1 +Category: curl --- Append this option to any ordinary curl command line, and you will get a libcurl-using C source code written to the file that does the equivalent diff --git a/docs/cmdline-opts/limit-rate.d b/docs/cmdline-opts/limit-rate.d index 06c456e3e75752..cb3a8607707bcb 100644 --- a/docs/cmdline-opts/limit-rate.d +++ b/docs/cmdline-opts/limit-rate.d @@ -1,6 +1,7 @@ Long: limit-rate Arg: Help: Limit transfer speed to RATE +Category: connection --- Specify the maximum transfer rate you want curl to use - for both downloads and uploads. This feature is useful if you have a limited pipe and you'd like diff --git a/docs/cmdline-opts/list-only.d b/docs/cmdline-opts/list-only.d index 4c56304a0df6db..c69885801b4de2 100644 --- a/docs/cmdline-opts/list-only.d +++ b/docs/cmdline-opts/list-only.d @@ -2,7 +2,8 @@ Long: list-only Short: l Protocols: FTP POP3 Help: List only mode -Added: 7.21.5 +Added: 4.0 +Category: ftp pop3 --- (FTP) When listing an FTP directory, this switch forces a name-only view. This is diff --git a/docs/cmdline-opts/local-port.d b/docs/cmdline-opts/local-port.d index d96b46eb89339e..3f7a0e024a3dab 100644 --- a/docs/cmdline-opts/local-port.d +++ b/docs/cmdline-opts/local-port.d @@ -2,6 +2,7 @@ Long: local-port Arg: Help: Force use of RANGE for local port numbers Added: 7.15.2 +Category: connection --- Set a preferred single number or range (FROM-TO) of local port numbers to use for the connection(s). Note that port numbers by nature are a scarce resource diff --git a/docs/cmdline-opts/location-trusted.d b/docs/cmdline-opts/location-trusted.d index 995a8718aae843..f01d842bc5e610 100644 --- a/docs/cmdline-opts/location-trusted.d +++ b/docs/cmdline-opts/location-trusted.d @@ -2,6 +2,7 @@ Long: location-trusted Help: Like --location, and send auth to other hosts Protocols: HTTP See-also: user +Category: http auth --- Like --location, but will allow sending the name + password to all hosts that the site may redirect to. This may or may not introduce a security breach if diff --git a/docs/cmdline-opts/location.d b/docs/cmdline-opts/location.d index 7c70e6981f5b43..c70b2eba885cc1 100644 --- a/docs/cmdline-opts/location.d +++ b/docs/cmdline-opts/location.d @@ -2,6 +2,7 @@ Long: location Short: L Help: Follow redirects Protocols: HTTP +Category: http --- If the server reports that the requested page has moved to a different location (indicated with a Location: header and a 3XX response code), this @@ -13,11 +14,13 @@ intercept the user+password. See also --location-trusted on how to change this. You can limit the amount of redirects to follow by using the --max-redirs option. -When curl follows a redirect and the request is not a plain GET (for example -POST or PUT), it will do the following request with a GET if the HTTP response -was 301, 302, or 303. If the response code was any other 3xx code, curl will -re-send the following request using the same unmodified method. +When curl follows a redirect and if the request is a POST, it will do the +following request with a GET if the HTTP response was 301, 302, or 303. If the +response code was any other 3xx code, curl will re-send the following request +using the same unmodified method. 
-You can tell curl to not change the non-GET request method to GET after a 30x -response by using the dedicated options for that: --post301, --post302 and ---post303. +You can tell curl to not change POST requests to GET after a 30x response by +using the dedicated options for that: --post301, --post302 and --post303. + +The method set with --request overrides the method curl would otherwise select +to use. diff --git a/docs/cmdline-opts/login-options.d b/docs/cmdline-opts/login-options.d index 8bad0511d45862..887d5d517eaf30 100644 --- a/docs/cmdline-opts/login-options.d +++ b/docs/cmdline-opts/login-options.d @@ -3,6 +3,7 @@ Arg: Protocols: IMAP POP3 SMTP Help: Server login options Added: 7.34.0 +Category: imap pop3 smtp auth --- Specify the login options to use during server authentication. diff --git a/docs/cmdline-opts/mail-auth.d b/docs/cmdline-opts/mail-auth.d index 70cf0eda4614fe..529997161dd8bf 100644 --- a/docs/cmdline-opts/mail-auth.d +++ b/docs/cmdline-opts/mail-auth.d @@ -4,6 +4,7 @@ Protocols: SMTP Help: Originator address of the original email Added: 7.25.0 See-also: mail-rcpt mail-from +Category: smtp --- Specify a single address. This will be used to specify the authentication address (identity) of a submitted message that is being relayed to another diff --git a/docs/cmdline-opts/mail-from.d b/docs/cmdline-opts/mail-from.d index 1d932344cfe573..faf48e2a4b0275 100644 --- a/docs/cmdline-opts/mail-from.d +++ b/docs/cmdline-opts/mail-from.d @@ -4,5 +4,6 @@ Help: Mail from this address Protocols: SMTP Added: 7.20.0 See-also: mail-rcpt mail-auth +Category: smtp --- Specify a single address that the given mail should get sent from. diff --git a/docs/cmdline-opts/mail-rcpt-allowfails.d b/docs/cmdline-opts/mail-rcpt-allowfails.d new file mode 100644 index 00000000000000..76457c32ea984f --- /dev/null +++ b/docs/cmdline-opts/mail-rcpt-allowfails.d @@ -0,0 +1,16 @@ +Long: mail-rcpt-allowfails +Help: Allow RCPT TO command to fail for some recipients +Protocols: SMTP +Added: 7.69.0 +Category: smtp +--- +When sending data to multiple recipients, by default curl will abort the SMTP +conversation if at least one of the recipients causes the RCPT TO command to +return an error. + +The default behavior can be changed by passing the --mail-rcpt-allowfails +command-line option, which will make curl ignore errors and proceed with the +remaining valid recipients. + +In case all recipients cause the RCPT TO command to fail, curl will abort the SMTP +conversation and return the error received from the last RCPT TO command. \ No newline at end of file diff --git a/docs/cmdline-opts/mail-rcpt.d b/docs/cmdline-opts/mail-rcpt.d index 0a2859b6884fd7..d8ae046ee4def6 100644 --- a/docs/cmdline-opts/mail-rcpt.d +++ b/docs/cmdline-opts/mail-rcpt.d @@ -3,6 +3,7 @@ Arg:
Help: Mail to this address Protocols: SMTP Added: 7.20.0 +Category: smtp --- Specify a single address, user name or mailing list name. Repeat this option several times to send to multiple recipients. diff --git a/docs/cmdline-opts/manual.d b/docs/cmdline-opts/manual.d index a9dbb0c78adcb2..25ed08dd274765 100644 --- a/docs/cmdline-opts/manual.d +++ b/docs/cmdline-opts/manual.d @@ -1,5 +1,6 @@ Long: manual Short: M Help: Display the full manual +Category: curl --- Manual. Display the huge help text. diff --git a/docs/cmdline-opts/max-filesize.d b/docs/cmdline-opts/max-filesize.d index 50d5266e1b1ec3..1f6bdc663d1f86 100644 --- a/docs/cmdline-opts/max-filesize.d +++ b/docs/cmdline-opts/max-filesize.d @@ -2,6 +2,7 @@ Long: max-filesize Arg: Help: Maximum file size to download See-also: limit-rate +Category: connection --- Specify the maximum size (in bytes) of a file to download. If the file requested is larger than this value, the transfer will not start and curl will diff --git a/docs/cmdline-opts/max-redirs.d b/docs/cmdline-opts/max-redirs.d index a97860a8bd6a71..ba16c43fc1f0ff 100644 --- a/docs/cmdline-opts/max-redirs.d +++ b/docs/cmdline-opts/max-redirs.d @@ -2,6 +2,7 @@ Long: max-redirs Arg: Help: Maximum number of redirects allowed Protocols: HTTP +Category: http --- Set maximum number of redirection-followings allowed. When --location is used, is used to prevent curl from following redirections too much. By default, the diff --git a/docs/cmdline-opts/max-time.d b/docs/cmdline-opts/max-time.d index 0057f9d0472fc7..c4e4ed7eb48c2b 100644 --- a/docs/cmdline-opts/max-time.d +++ b/docs/cmdline-opts/max-time.d @@ -3,6 +3,7 @@ Short: m Arg: Help: Maximum time allowed for the transfer See-also: connect-timeout +Category: connection --- Maximum time in seconds that you allow the whole operation to take. This is useful for preventing your batch jobs from hanging for hours due to slow diff --git a/docs/cmdline-opts/metalink.d b/docs/cmdline-opts/metalink.d index 81fc8bc78c1741..88376643975db7 100644 --- a/docs/cmdline-opts/metalink.d +++ b/docs/cmdline-opts/metalink.d @@ -2,6 +2,7 @@ Long: metalink Help: Process given URLs as metalink XML file Added: 7.27.0 Requires: metalink +Category: misc --- This option can tell curl to parse and process a given URI as Metalink file (both version 3 and 4 (RFC 5854) are supported) and make use of the mirrors diff --git a/docs/cmdline-opts/negotiate.d b/docs/cmdline-opts/negotiate.d index 69a6b91709507f..e247bfbee5b2d5 100644 --- a/docs/cmdline-opts/negotiate.d +++ b/docs/cmdline-opts/negotiate.d @@ -2,6 +2,7 @@ Long: negotiate Help: Use HTTP Negotiate (SPNEGO) authentication Protocols: HTTP See-also: basic ntlm anyauth proxy-negotiate +Category: auth http --- Enables Negotiate (SPNEGO) authentication. diff --git a/docs/cmdline-opts/netrc-file.d b/docs/cmdline-opts/netrc-file.d index 50126d25549f84..95fb2654a2418f 100644 --- a/docs/cmdline-opts/netrc-file.d +++ b/docs/cmdline-opts/netrc-file.d @@ -3,6 +3,7 @@ Help: Specify FILE for netrc Arg: Added: 7.21.5 Mutexed: netrc +Category: curl --- This option is similar to --netrc, except that you provide the path (absolute or relative) to the netrc file that curl should use. 
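Putting the SMTP options above together, a sketch of submitting a message (addresses, server name and message file are all illustrative):

 curl --mail-from sender@example.com --mail-rcpt receiver@example.com --upload-file mail.txt smtp://mail.example.com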
You can only specify one diff --git a/docs/cmdline-opts/netrc-optional.d b/docs/cmdline-opts/netrc-optional.d index c285403094a5ea..fa92032bd234f8 100644 --- a/docs/cmdline-opts/netrc-optional.d +++ b/docs/cmdline-opts/netrc-optional.d @@ -2,6 +2,7 @@ Long: netrc-optional Help: Use either .netrc or URL Mutexed: netrc See-also: netrc-file +Category: curl --- Very similar to --netrc, but this option makes the .netrc usage \fBoptional\fP and not mandatory as the --netrc option does. diff --git a/docs/cmdline-opts/netrc.d b/docs/cmdline-opts/netrc.d index 2df26782cce6af..6aac568a6438b4 100644 --- a/docs/cmdline-opts/netrc.d +++ b/docs/cmdline-opts/netrc.d @@ -1,6 +1,7 @@ Long: netrc Short: n Help: Must read .netrc for user name and password +Category: curl --- Makes curl scan the \fI.netrc\fP (\fI_netrc\fP on Windows) file in the user's home directory for login name and password. This is typically used for FTP on diff --git a/docs/cmdline-opts/next.d b/docs/cmdline-opts/next.d index 1d1e70a35c9b90..1adcc7535c1377 100644 --- a/docs/cmdline-opts/next.d +++ b/docs/cmdline-opts/next.d @@ -5,6 +5,7 @@ Protocols: Added: 7.36.0 Magic: divider Help: Make next URL use its separate set of options +Category: curl --- Tells curl to use a separate operation for the following URL and associated options. This allows you to send several URL requests, each with their own diff --git a/docs/cmdline-opts/no-alpn.d b/docs/cmdline-opts/no-alpn.d index 88abb83682b88e..8031b4f07987ba 100644 --- a/docs/cmdline-opts/no-alpn.d +++ b/docs/cmdline-opts/no-alpn.d @@ -5,6 +5,7 @@ Added: 7.36.0 See-also: no-npn http2 Requires: TLS Help: Disable the ALPN TLS extension +Category: tls http --- Disable the ALPN TLS extension. ALPN is enabled by default if libcurl was built with an SSL library that supports ALPN. ALPN is used by a libcurl that supports diff --git a/docs/cmdline-opts/no-buffer.d b/docs/cmdline-opts/no-buffer.d index 65a6282f64d4e3..1079f47128cfc7 100644 --- a/docs/cmdline-opts/no-buffer.d +++ b/docs/cmdline-opts/no-buffer.d @@ -1,6 +1,7 @@ Long: no-buffer Short: N Help: Disable buffering of the output stream +Category: curl --- Disables the buffering of the output stream. In normal work situations, curl will use a standard buffered output stream that will have the effect that it diff --git a/docs/cmdline-opts/no-keepalive.d b/docs/cmdline-opts/no-keepalive.d index 8fb28a0365e99f..72f3bc9a5bc992 100644 --- a/docs/cmdline-opts/no-keepalive.d +++ b/docs/cmdline-opts/no-keepalive.d @@ -1,5 +1,6 @@ Long: no-keepalive Help: Disable TCP keepalive on the connection +Category: connection --- Disables the use of keepalive messages on the TCP connection. curl otherwise enables them by default. diff --git a/docs/cmdline-opts/no-npn.d b/docs/cmdline-opts/no-npn.d index ab0f6de2e00ab3..27e5974bdf7bfd 100644 --- a/docs/cmdline-opts/no-npn.d +++ b/docs/cmdline-opts/no-npn.d @@ -6,6 +6,7 @@ Mutexed: See-also: no-alpn http2 Requires: TLS Help: Disable the NPN TLS extension +Category: tls http --- Disable the NPN TLS extension. NPN is enabled by default if libcurl was built with an SSL library that supports NPN. 
NPN is used by a libcurl that supports diff --git a/docs/cmdline-opts/no-progress-meter.d b/docs/cmdline-opts/no-progress-meter.d new file mode 100644 index 00000000000000..bf28d635d5f209 --- /dev/null +++ b/docs/cmdline-opts/no-progress-meter.d @@ -0,0 +1,11 @@ +Long: no-progress-meter +Help: Do not show the progress meter +See-also: verbose silent +Added: 7.67.0 +Category: verbose +--- +Option to switch off the progress meter output without muting or otherwise +affecting warning and informational messages like --silent does. + +Note that this is the negated option name documented. You can thus use +--progress-meter to enable the progress meter again. diff --git a/docs/cmdline-opts/no-sessionid.d b/docs/cmdline-opts/no-sessionid.d index 397a158697dc87..013ca4cb47e795 100644 --- a/docs/cmdline-opts/no-sessionid.d +++ b/docs/cmdline-opts/no-sessionid.d @@ -2,6 +2,7 @@ Long: no-sessionid Help: Disable SSL session-ID reusing Protocols: TLS Added: 7.16.0 +Category: tls --- Disable curl's use of SSL session-ID caching. By default all transfers are done using the cache. Note that while nothing should ever get hurt by diff --git a/docs/cmdline-opts/noproxy.d b/docs/cmdline-opts/noproxy.d index a216e75f49496b..12ce6d3ed205d3 100644 --- a/docs/cmdline-opts/noproxy.d +++ b/docs/cmdline-opts/noproxy.d @@ -2,6 +2,7 @@ Long: noproxy Arg: Help: List of hosts which do not use proxy Added: 7.19.4 +Category: proxy --- Comma-separated list of hosts which do not use a proxy, if one is specified. The only wildcard is a single * character, which matches all hosts, and diff --git a/docs/cmdline-opts/ntlm-wb.d b/docs/cmdline-opts/ntlm-wb.d index 7b9338408530ac..4a2f7e1e6a0ff8 100644 --- a/docs/cmdline-opts/ntlm-wb.d +++ b/docs/cmdline-opts/ntlm-wb.d @@ -2,6 +2,7 @@ Long: ntlm-wb Help: Use HTTP NTLM authentication with winbind Protocols: HTTP See-also: ntlm proxy-ntlm +Category: auth http --- Enables NTLM much in the style --ntlm does, but hand over the authentication to the separate binary ntlmauth application that is executed when needed. diff --git a/docs/cmdline-opts/ntlm.d b/docs/cmdline-opts/ntlm.d index baaa1d534d3d23..c63c086ebe4098 100644 --- a/docs/cmdline-opts/ntlm.d +++ b/docs/cmdline-opts/ntlm.d @@ -4,6 +4,7 @@ Mutexed: basic negotiate digest anyauth See-also: proxy-ntlm Protocols: HTTP Requires: TLS +Category: auth http --- Enables NTLM authentication. The NTLM authentication method was designed by Microsoft and is used by IIS web servers. It is a proprietary protocol, diff --git a/docs/cmdline-opts/oauth2-bearer.d b/docs/cmdline-opts/oauth2-bearer.d index 780716748ecd4c..e0ea210ed8a0fb 100644 --- a/docs/cmdline-opts/oauth2-bearer.d +++ b/docs/cmdline-opts/oauth2-bearer.d @@ -1,7 +1,8 @@ Long: oauth2-bearer Help: OAuth 2 Bearer Token Arg: -Protocols: IMAP POP3 SMTP +Protocols: IMAP POP3 SMTP HTTP +Category: auth --- Specify the Bearer Token for OAUTH 2.0 server authentication. The Bearer Token is used in conjunction with the user name which can be specified as part of diff --git a/docs/cmdline-opts/output-dir.d b/docs/cmdline-opts/output-dir.d new file mode 100644 index 00000000000000..d2ff7738efca5c --- /dev/null +++ b/docs/cmdline-opts/output-dir.d @@ -0,0 +1,19 @@ +Long: output-dir +Arg: +Help: Directory to save files in +Added: 7.73.0 +See-also: remote-name remote-header-name +Category: curl +--- + +This option specifies the directory in which files should be stored, when +--remote-name or --output are used. 
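A sketch of the --output-dir option being introduced here (directory and URL are illustrative); as the following paragraphs explain, the directory applies to all output options up to the first --next and must already exist unless --create-dirs is also given:

 curl --output-dir /tmp/downloads -O https://example.com/file.html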
+ +The given output directory is used for all URLs and output options on the +command line, up until the first --next. + +If the specified target directory doesn't exist, the operation will fail +unless --create-dirs is also used. + +If this option is used multiple times, the last specified directory will be +used. diff --git a/docs/cmdline-opts/output.d b/docs/cmdline-opts/output.d index 35f52a2130303a..bf35bf4a09e31d 100644 --- a/docs/cmdline-opts/output.d +++ b/docs/cmdline-opts/output.d @@ -3,17 +3,18 @@ Arg: Short: o Help: Write to file instead of stdout See-also: remote-name remote-name-all remote-header-name +Category: important curl --- Write output to instead of stdout. If you are using {} or [] to fetch -multiple documents, you can use '#' followed by a number in the -specifier. That variable will be replaced with the current string for the URL -being fetched. Like in: +multiple documents, you should quote the URL and you can use '#' followed by a +number in the specifier. That variable will be replaced with the current +string for the URL being fetched. Like in: - curl http://{one,two}.example.com -o "file_#1.txt" + curl "http://{one,two}.example.com" -o "file_#1.txt" or use several variables like: - curl http://{site,host}.host[1-5].com -o "#1_#2" + curl "http://{site,host}.host[1-5].com" -o "#1_#2" You may use this option as many times as the number of URLs you have. For example, if you specify two URLs on the same command line, you can use it like diff --git a/docs/cmdline-opts/page-footer b/docs/cmdline-opts/page-footer index defe7e8b287ad1..479c712af26da8 100644 --- a/docs/cmdline-opts/page-footer +++ b/docs/cmdline-opts/page-footer @@ -37,6 +37,26 @@ accesses the target URL through the proxy. The list of host names can also be include numerical IP addresses, and IPv6 versions should then be given without enclosing brackets. +IPv6 numerical addresses are compared as strings, so they will only match if +the representations are the same: "::1" is the same as "::0:1" but they don't +match. +.IP "CURL_SSL_BACKEND " +If curl was built with support for "MultiSSL", meaning that it has built-in +support for more than one TLS backend, this environment variable can be set to +the case insensitive name of the particular backend to use when curl is +invoked. Setting a name that isn't a built-in alternative, will make curl +stay with the default. +.IP "QLOGDIR " +If curl was built with HTTP/3 support, setting this environment variable to a +local directory will make curl produce qlogs in that directory, using file +names named after the destination connection id (in hex). Do note that these +files can become rather large. Works with both QUIC backends. +.IP "SSLKEYLOGFILE " +If you set this environment variable to a file name, curl will store TLS +secrets from its connections in that file when invoked to enable you to +analyze the TLS traffic in real time using network analyzing tools such as +Wireshark. This works with the following TLS backends: OpenSSL, libressl, +BoringSSL, GnuTLS, NSS and wolfSSL. .SH "PROXY PROTOCOL PREFIXES" Since curl version 7.21.7, the proxy string may be specified with a protocol:// prefix to specify alternative proxy protocols. @@ -243,6 +263,16 @@ SSL public key does not matched pinned public key Invalid SSL certificate status. .IP 92 Stream error in HTTP/2 framing layer. +.IP 93 +An API function was called from inside a callback. +.IP 94 +An authentication function returned an error. +.IP 95 +A problem was detected in the HTTP/3 layer. 
This is somewhat generic and can +be one out of several problems, see the error message for details. +.IP 96 +QUIC connection error. This error may be caused by an SSL library error. QUIC +is the protocol used for HTTP/3 transfers. .IP XX More error codes will appear here in future releases. The existing ones are meant to never change. diff --git a/docs/cmdline-opts/page-header b/docs/cmdline-opts/page-header index 51f45edadf12ed..a51e485babe820 100644 --- a/docs/cmdline-opts/page-header +++ b/docs/cmdline-opts/page-header @@ -5,7 +5,7 @@ .\" * | (__| |_| | _ <| |___ .\" * \___|\___/|_| \_\_____| .\" * -.\" * Copyright (C) 1998 - 2018, Daniel Stenberg, , et al. +.\" * Copyright (C) 1998 - 2020, Daniel Stenberg, , et al. .\" * .\" * This software is licensed as described in the file COPYING, which .\" * you should have received as part of this distribution. The terms @@ -31,8 +31,9 @@ curl \- transfer a URL .B curl is a tool to transfer data from or to a server, using one of the supported protocols (DICT, FILE, FTP, FTPS, GOPHER, HTTP, HTTPS, IMAP, IMAPS, LDAP, -LDAPS, POP3, POP3S, RTMP, RTSP, SCP, SFTP, SMB, SMBS, SMTP, SMTPS, TELNET -and TFTP). The command is designed to work without user interaction. +LDAPS, MQTT, POP3, POP3S, RTMP, RTMPS, RTSP, SCP, SFTP, SMB, SMBS, SMTP, +SMTPS, TELNET and TFTP). The command is designed to work without user +interaction. curl offers a busload of useful tricks like proxy support, user authentication, FTP upload, HTTP post, SSL connections, cookies, file transfer @@ -46,22 +47,22 @@ The URL syntax is protocol-dependent. You'll find a detailed description in RFC 3986. You can specify multiple URLs or parts of URLs by writing part sets within -braces as in: +braces and quoting the URL as in: - http://site.{one,two,three}.com + "http://site.{one,two,three}.com" or you can get sequences of alphanumeric series by using [] as in: - ftp://ftp.example.com/file[1-100].txt + "ftp://ftp.example.com/file[1-100].txt" - ftp://ftp.example.com/file[001-100].txt (with leading zeros) + "ftp://ftp.example.com/file[001-100].txt" (with leading zeros) - ftp://ftp.example.com/file[a-z].txt + "ftp://ftp.example.com/file[a-z].txt" Nested sequences are not supported, but you can use several ones next to each other: - http://example.com/archive[1996-1999]/vol[1-4]/part{a,b,c}.html + "http://example.com/archive[1996-1999]/vol[1-4]/part{a,b,c}.html" You can specify any amount of URLs on the command line. They will be fetched in a sequential manner in the specified order. You can specify command line @@ -70,9 +71,9 @@ options and URLs mixed and in any order on the command line. You can specify a step counter for the ranges to get every Nth number or letter: - http://example.com/file[1-100:10].txt + "http://example.com/file[1-100:10].txt" - http://example.com/file[a-z:2].txt + "http://example.com/file[a-z:2].txt" When using [] or {} sequences when invoked from a command line prompt, you probably have to put the full URL within double quotes to avoid the shell from @@ -82,7 +83,7 @@ for example '&', '?' and '*'. Provide the IPv6 zone index in the URL with an escaped percentage sign and the interface name. Like in - http://[fe80::3%25eth0]/ + "http://[fe80::3%25eth0]/" If you specify URL without protocol:// prefix, curl will attempt to guess what protocol you might want. It will then default to HTTP but try other protocols @@ -98,6 +99,55 @@ getting many files from the same server will not do multiple connects / handshakes. This improves speed. 
Of course this is only done on files specified on a single command line and cannot be used between separate curl invokes. +.SH PROTOCOLS +curl supports numerous protocols, or put in URL terms: schemes. Your +particular build may not support them all. +.IP DICT +Lets you look up words using online dictionaries. +.IP FILE +Read or write local files. curl does not support accessing file:// URL +remotely, but when running on Microsoft Windows using the native UNC approach +will work. +.IP FTP(S) +curl supports the File Transfer Protocol with a lot of tweaks and levers. With +or without using TLS. +.IP GOPHER +Retrieve files. +.IP HTTP(S) +curl supports HTTP with numerous options and variations. It can speak HTTP +version 0.9, 1.0, 1.1, 2 and 3 depending on build options and the correct +command line options. +.IP IMAP(S) +Using the mail reading protocol, curl can "download" emails for you. With or +without using TLS. +.IP LDAP(S) +curl can do directory lookups for you, with or without TLS. +.IP MQTT +curl supports MQTT version 3. Downloading over MQTT equals "subscribe" to a +topic while uploading/posting equals "publish" on a topic. MQTT support is +experimental and TLS based MQTT is not supported (yet). +.IP POP3(S) +Downloading from a pop3 server means getting a mail. With or without using +TLS. +.IP RTMP(S) +The Realtime Messaging Protocol is primarily used to serve streaming media +and curl can download it. +.IP RTSP +curl supports RTSP 1.0 downloads. +.IP SCP +curl supports SSH version 2 scp transfers. +.IP SFTP +curl supports SFTP (draft 5) done over SSH version 2. +.IP SMB(S) +curl supports SMB version 1 for upload and download. +.IP SMTP(S) +Uploading contents to an SMTP server means sending an email. With or without +TLS. +.IP TELNET +Telling curl to fetch a telnet URL starts an interactive session where it +sends what it reads on stdin and outputs what the server sends it. +.IP TFTP +curl can do TFTP downloads and uploads. .SH "PROGRESS METER" curl normally displays a progress meter during operations, indicating the amount of transferred data, transfer speeds and estimated time left, etc. The diff --git a/docs/cmdline-opts/parallel-immediate.d b/docs/cmdline-opts/parallel-immediate.d new file mode 100644 index 00000000000000..95c8afc42cffb0 --- /dev/null +++ b/docs/cmdline-opts/parallel-immediate.d @@ -0,0 +1,10 @@ +Long: parallel-immediate +Help: Do not wait for multiplexing (with --parallel) +Added: 7.68.0 +See-also: parallel parallel-max +Category: connection curl +--- +When doing parallel transfers, this option will instruct curl to prefer +opening up more connections in parallel at once rather than waiting to see if +new transfers can be added as multiplexed streams on another connection. diff --git a/docs/cmdline-opts/parallel-max.d b/docs/cmdline-opts/parallel-max.d new file mode 100644 index 00000000000000..d49aeb7a9d9b0b --- /dev/null +++ b/docs/cmdline-opts/parallel-max.d @@ -0,0 +1,10 @@ +Long: parallel-max +Help: Maximum concurrency for parallel transfers +Added: 7.66.0 +See-also: parallel +Category: connection curl +--- +When asked to do parallel transfers, using --parallel, this option controls +the maximum number of transfers to do simultaneously. + +The default is 50.
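For illustration, a typical way to combine these parallel options (the host and file range are hypothetical placeholders):

 curl --parallel --parallel-max 10 -O "https://example.com/file[1-100].txt"

This asks curl to fetch all 100 globbed URLs with at most 10 transfers running at any one time; adding --parallel-immediate would make it open new connections right away instead of waiting to see whether transfers can be multiplexed on an existing connection.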
diff --git a/docs/cmdline-opts/parallel.d b/docs/cmdline-opts/parallel.d new file mode 100644 index 00000000000000..d6b4affe3763a3 --- /dev/null +++ b/docs/cmdline-opts/parallel.d @@ -0,0 +1,8 @@ +Short: Z +Long: parallel +Help: Perform transfers in parallel +Added: 7.66.0 +Category: connection curl +--- +Makes curl perform its transfers in parallel as compared to the regular serial +manner. diff --git a/docs/cmdline-opts/pass.d b/docs/cmdline-opts/pass.d index 2639cb9d062aba..f6633d242c219a 100644 --- a/docs/cmdline-opts/pass.d +++ b/docs/cmdline-opts/pass.d @@ -2,6 +2,7 @@ Long: pass Arg: Help: Pass phrase for the private key Protocols: SSH TLS +Category: ssh tls auth --- Passphrase for the private key diff --git a/docs/cmdline-opts/path-as-is.d b/docs/cmdline-opts/path-as-is.d index 946e2f07a233fb..0bfaf6639ba42d 100644 --- a/docs/cmdline-opts/path-as-is.d +++ b/docs/cmdline-opts/path-as-is.d @@ -1,6 +1,7 @@ Long: path-as-is Help: Do not squash .. sequences in URL path Added: 7.42.0 +Category: curl --- Tell curl to not handle sequences of /../ or /./ in the given URL path. Normally curl will squash or merge them according to standards but with diff --git a/docs/cmdline-opts/pinnedpubkey.d b/docs/cmdline-opts/pinnedpubkey.d index 0657e6e791476d..617252c6ce458d 100644 --- a/docs/cmdline-opts/pinnedpubkey.d +++ b/docs/cmdline-opts/pinnedpubkey.d @@ -2,6 +2,7 @@ Long: pinnedpubkey Arg: Help: FILE/HASHES Public key to verify peer against Protocols: TLS +Category: tls --- Tells curl to use the specified public key file (or hashes) to verify the peer. This can be a path to a file which contains a single public key in PEM @@ -15,13 +16,11 @@ abort the connection before sending or receiving any data. PEM/DER support: 7.39.0: OpenSSL, GnuTLS and GSKit - 7.43.0: NSS and wolfSSL/CyaSSL + 7.43.0: NSS and wolfSSL 7.47.0: mbedtls - 7.49.0: PolarSSL sha256 support: - 7.44.0: OpenSSL, GnuTLS, NSS and wolfSSL/CyaSSL. + 7.44.0: OpenSSL, GnuTLS, NSS and wolfSSL 7.47.0: mbedtls - 7.49.0: PolarSSL Other SSL backends not supported. If this option is used several times, the last one will be used. diff --git a/docs/cmdline-opts/post301.d b/docs/cmdline-opts/post301.d index 87a9fe7edc9c2b..9cc2ad01835876 100644 --- a/docs/cmdline-opts/post301.d +++ b/docs/cmdline-opts/post301.d @@ -3,6 +3,7 @@ Help: Do not switch to GET after following a 301 Protocols: HTTP See-also: post302 post303 location Added: 7.17.1 +Category: http post --- Tells curl to respect RFC 7231/6.4.2 and not convert POST requests into GET requests when following a 301 redirection. The non-RFC behaviour is ubiquitous diff --git a/docs/cmdline-opts/post302.d b/docs/cmdline-opts/post302.d index caf0d87f18008a..02749032f03ae7 100644 --- a/docs/cmdline-opts/post302.d +++ b/docs/cmdline-opts/post302.d @@ -3,6 +3,7 @@ Help: Do not switch to GET after following a 302 Protocols: HTTP See-also: post301 post303 location Added: 7.19.1 +Category: http post --- Tells curl to respect RFC 7231/6.4.3 and not convert POST requests into GET requests when following a 302 redirection. The non-RFC behaviour is ubiquitous diff --git a/docs/cmdline-opts/post303.d b/docs/cmdline-opts/post303.d index 44f39e6104da07..1a67e13efd2840 100644 --- a/docs/cmdline-opts/post303.d +++ b/docs/cmdline-opts/post303.d @@ -3,6 +3,7 @@ Help: Do not switch to GET after following a 303 Protocols: HTTP See-also: post302 post301 location Added: 7.26.0 +Category: http post --- Tells curl to violate RFC 7231/6.4.4 and not convert POST requests into GET requests when following 303 redirections. 
A server may require a POST to diff --git a/docs/cmdline-opts/preproxy.d b/docs/cmdline-opts/preproxy.d index b8eb77fa4f5388..a917c16ed8874c 100644 --- a/docs/cmdline-opts/preproxy.d +++ b/docs/cmdline-opts/preproxy.d @@ -2,6 +2,7 @@ Long: preproxy Arg: [protocol://]host[:port] Help: Use this proxy first Added: 7.52.0 +Category: proxy --- Use the specified SOCKS proxy before connecting to an HTTP or HTTPS --proxy. In such a case curl first connects to the SOCKS proxy and then connects (through diff --git a/docs/cmdline-opts/progress-bar.d b/docs/cmdline-opts/progress-bar.d index f27de2d9309667..299d5384a42cf4 100644 --- a/docs/cmdline-opts/progress-bar.d +++ b/docs/cmdline-opts/progress-bar.d @@ -1,6 +1,7 @@ Short: # Long: progress-bar Help: Display transfer progress as a bar +Category: verbose --- Make curl display transfer progress as a simple progress bar instead of the standard, more informational, meter. diff --git a/docs/cmdline-opts/proto-default.d b/docs/cmdline-opts/proto-default.d index ccc3b85f38eacf..1c2afea2c91be0 100644 --- a/docs/cmdline-opts/proto-default.d +++ b/docs/cmdline-opts/proto-default.d @@ -2,6 +2,7 @@ Long: proto-default Help: Use PROTOCOL for any URL missing a scheme Arg: Added: 7.45.0 +Category: connection curl --- Tells curl to use \fIprotocol\fP for any URL missing a scheme name. diff --git a/docs/cmdline-opts/proto-redir.d b/docs/cmdline-opts/proto-redir.d index c9eeeab1d778e5..9a096f5319223f 100644 --- a/docs/cmdline-opts/proto-redir.d +++ b/docs/cmdline-opts/proto-redir.d @@ -2,6 +2,7 @@ Long: proto-redir Arg: Help: Enable/disable PROTOCOLS on redirect Added: 7.20.2 +Category: connection curl --- Tells curl to limit what protocols it may use on redirect. Protocols denied by --proto are not overridden by this option. See --proto for how protocols are @@ -11,7 +12,8 @@ Example, allow only HTTP and HTTPS on redirect: curl --proto-redir -all,http,https http://example.com -By default curl will allow all protocols on redirect except several disabled -for security reasons: Since 7.19.4 FILE and SCP are disabled, and since 7.40.0 -SMB and SMBS are also disabled. Specifying \fIall\fP or \fI+all\fP enables all -protocols on redirect, including those disabled for security. +By default curl will allow HTTP, HTTPS, FTP and FTPS on redirect (7.65.2). +Older versions of curl allowed all protocols on redirect except several +disabled for security reasons: Since 7.19.4 FILE and SCP are disabled, and +since 7.40.0 SMB and SMBS are also disabled. Specifying \fIall\fP or \fI+all\fP +enables all protocols on redirect, including those disabled for security. diff --git a/docs/cmdline-opts/proto.d b/docs/cmdline-opts/proto.d index 1513fdc054948a..46c912254311d0 100644 --- a/docs/cmdline-opts/proto.d +++ b/docs/cmdline-opts/proto.d @@ -3,10 +3,11 @@ Arg: Help: Enable/disable PROTOCOLS See-also: proto-redir proto-default Added: 7.20.2 +Category: connection curl --- Tells curl to limit what protocols it may use in the transfer. Protocols are evaluated left to right, are comma separated, and are each a protocol name or -'all', optionally prefixed by zero or more modifiers. Available modifiers are: +\&'all', optionally prefixed by zero or more modifiers. 
Available modifiers are: .RS .TP 3 .B + diff --git a/docs/cmdline-opts/proxy-anyauth.d b/docs/cmdline-opts/proxy-anyauth.d index b60d0a05e6a573..6410c0e6acaaf6 100644 --- a/docs/cmdline-opts/proxy-anyauth.d +++ b/docs/cmdline-opts/proxy-anyauth.d @@ -2,6 +2,7 @@ Long: proxy-anyauth Help: Pick any proxy authentication method Added: 7.13.2 See-also: proxy proxy-basic proxy-digest +Category: proxy auth --- Tells curl to pick a suitable authentication method when communicating with the given HTTP proxy. This might cause an extra request/response round-trip. diff --git a/docs/cmdline-opts/proxy-basic.d b/docs/cmdline-opts/proxy-basic.d index 566f890a974833..e842f9900107e8 100644 --- a/docs/cmdline-opts/proxy-basic.d +++ b/docs/cmdline-opts/proxy-basic.d @@ -1,6 +1,7 @@ Long: proxy-basic Help: Use Basic authentication on the proxy See-also: proxy proxy-anyauth proxy-digest +Category: proxy auth --- Tells curl to use HTTP Basic authentication when communicating with the given proxy. Use --basic for enabling HTTP Basic with a remote host. Basic is the diff --git a/docs/cmdline-opts/proxy-cacert.d b/docs/cmdline-opts/proxy-cacert.d index 2713dd2a4c4a2c..bbc731a28f75a4 100644 --- a/docs/cmdline-opts/proxy-cacert.d +++ b/docs/cmdline-opts/proxy-cacert.d @@ -3,5 +3,6 @@ Help: CA certificate to verify peer against for proxy Arg: Added: 7.52.0 See-also: proxy-capath cacert capath proxy +Category: proxy tls --- Same as --cacert but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-capath.d b/docs/cmdline-opts/proxy-capath.d index 177246aabc387e..cc342b755e8e09 100644 --- a/docs/cmdline-opts/proxy-capath.d +++ b/docs/cmdline-opts/proxy-capath.d @@ -3,5 +3,6 @@ Help: CA directory to verify peer against for proxy Arg: Added: 7.52.0 See-also: proxy-cacert proxy capath +Category: proxy tls --- Same as --capath but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-cert-type.d b/docs/cmdline-opts/proxy-cert-type.d index 906d2a1153c523..fb7596c178a0ff 100644 --- a/docs/cmdline-opts/proxy-cert-type.d +++ b/docs/cmdline-opts/proxy-cert-type.d @@ -2,5 +2,6 @@ Long: proxy-cert-type Arg: Added: 7.52.0 Help: Client certificate type for HTTPS proxy +Category: proxy tls --- Same as --cert-type but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-cert.d b/docs/cmdline-opts/proxy-cert.d index 43acd3950908a3..7df2712f32d16c 100644 --- a/docs/cmdline-opts/proxy-cert.d +++ b/docs/cmdline-opts/proxy-cert.d @@ -2,5 +2,6 @@ Long: proxy-cert Arg: Help: Set client certificate for proxy Added: 7.52.0 +Category: proxy tls --- Same as --cert but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-ciphers.d b/docs/cmdline-opts/proxy-ciphers.d index dcac8128458464..366555673110b6 100644 --- a/docs/cmdline-opts/proxy-ciphers.d +++ b/docs/cmdline-opts/proxy-ciphers.d @@ -2,5 +2,6 @@ Long: proxy-ciphers Arg: Help: SSL ciphers to use for proxy Added: 7.52.0 +Category: proxy tls --- Same as --ciphers but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-crlfile.d b/docs/cmdline-opts/proxy-crlfile.d index 1d6247f479e278..580dc50ce04911 100644 --- a/docs/cmdline-opts/proxy-crlfile.d +++ b/docs/cmdline-opts/proxy-crlfile.d @@ -2,5 +2,6 @@ Long: proxy-crlfile Arg: Help: Set a CRL list for proxy Added: 7.52.0 +Category: proxy tls --- Same as --crlfile but used in HTTPS proxy context. 
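As a sketch of how the --proxy-* TLS options above are used together (the proxy address and CA file name are hypothetical, and curl must be built with HTTPS-proxy support):

 curl --proxy https://proxy.example.com:3128 --proxy-cacert proxy-ca.pem "https://example.com/"

Here the connection to the HTTPS proxy itself is verified against proxy-ca.pem, while the usual --cacert or default CA store still applies to the target server.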
diff --git a/docs/cmdline-opts/proxy-digest.d b/docs/cmdline-opts/proxy-digest.d index ccf46636c234e5..52f1fcc67d0917 100644 --- a/docs/cmdline-opts/proxy-digest.d +++ b/docs/cmdline-opts/proxy-digest.d @@ -1,6 +1,7 @@ Long: proxy-digest Help: Use Digest authentication on the proxy See-also: proxy proxy-anyauth proxy-basic +Category: proxy tls --- Tells curl to use HTTP Digest authentication when communicating with the given proxy. Use --digest for enabling HTTP Digest with a remote host. diff --git a/docs/cmdline-opts/proxy-header.d b/docs/cmdline-opts/proxy-header.d index c1b0bb7c448fec..9f1121d2f16bea 100644 --- a/docs/cmdline-opts/proxy-header.d +++ b/docs/cmdline-opts/proxy-header.d @@ -3,6 +3,7 @@ Arg:
Help: Pass custom header(s) to proxy Protocols: HTTP Added: 7.37.0 +Category: proxy --- Extra header to include in the request when sending HTTP to a proxy. You may specify any number of extra headers. This is the equivalent option to --header diff --git a/docs/cmdline-opts/proxy-insecure.d b/docs/cmdline-opts/proxy-insecure.d index 762828f43fdbb2..e123f40192f724 100644 --- a/docs/cmdline-opts/proxy-insecure.d +++ b/docs/cmdline-opts/proxy-insecure.d @@ -1,5 +1,6 @@ Long: proxy-insecure Help: Do HTTPS proxy connections without verifying the proxy Added: 7.52.0 +Category: proxy tls --- Same as --insecure but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-key-type.d b/docs/cmdline-opts/proxy-key-type.d index ce7482ae941c27..1906872ef65845 100644 --- a/docs/cmdline-opts/proxy-key-type.d +++ b/docs/cmdline-opts/proxy-key-type.d @@ -2,5 +2,6 @@ Long: proxy-key-type Arg: Help: Private key file type for proxy Added: 7.52.0 +Category: proxy tls --- Same as --key-type but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-key.d b/docs/cmdline-opts/proxy-key.d index e61eb18a99ab5a..57d469f435cf83 100644 --- a/docs/cmdline-opts/proxy-key.d +++ b/docs/cmdline-opts/proxy-key.d @@ -1,5 +1,6 @@ Long: proxy-key Help: Private key for HTTPS proxy Arg: +Category: proxy tls --- Same as --key but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-negotiate.d b/docs/cmdline-opts/proxy-negotiate.d index 775f62a9a3a0bd..72f35960e98bd7 100644 --- a/docs/cmdline-opts/proxy-negotiate.d +++ b/docs/cmdline-opts/proxy-negotiate.d @@ -2,6 +2,7 @@ Long: proxy-negotiate Help: Use HTTP Negotiate (SPNEGO) authentication on the proxy Added: 7.17.1 See-also: proxy-anyauth proxy-basic +Category: proxy auth --- Tells curl to use HTTP Negotiate (SPNEGO) authentication when communicating with the given proxy. Use --negotiate for enabling HTTP Negotiate (SPNEGO) diff --git a/docs/cmdline-opts/proxy-ntlm.d b/docs/cmdline-opts/proxy-ntlm.d index c30db53b9b48d4..cb1ba7b00ee83d 100644 --- a/docs/cmdline-opts/proxy-ntlm.d +++ b/docs/cmdline-opts/proxy-ntlm.d @@ -1,6 +1,7 @@ Long: proxy-ntlm Help: Use NTLM authentication on the proxy See-also: proxy-negotiate proxy-anyauth +Category: proxy auth --- Tells curl to use HTTP NTLM authentication when communicating with the given proxy. Use --ntlm for enabling NTLM with a remote host. diff --git a/docs/cmdline-opts/proxy-pass.d b/docs/cmdline-opts/proxy-pass.d index 3371714ba0e105..627451bbbe1fac 100644 --- a/docs/cmdline-opts/proxy-pass.d +++ b/docs/cmdline-opts/proxy-pass.d @@ -2,5 +2,6 @@ Long: proxy-pass Arg: Help: Pass phrase for the private key for HTTPS proxy Added: 7.52.0 +Category: proxy tls auth --- Same as --pass but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-pinnedpubkey.d b/docs/cmdline-opts/proxy-pinnedpubkey.d index abd6dc4aaffd98..2ab79f19e82897 100644 --- a/docs/cmdline-opts/proxy-pinnedpubkey.d +++ b/docs/cmdline-opts/proxy-pinnedpubkey.d @@ -2,6 +2,7 @@ Long: proxy-pinnedpubkey Arg: Help: FILE/HASHES public key to verify proxy with Protocols: TLS +Category: proxy tls --- Tells curl to use the specified public key file (or hashes) to verify the proxy. 
This can be a path to a file which contains a single public key in PEM diff --git a/docs/cmdline-opts/proxy-service-name.d b/docs/cmdline-opts/proxy-service-name.d index 9a73f2be624683..f78072906e4f3b 100644 --- a/docs/cmdline-opts/proxy-service-name.d +++ b/docs/cmdline-opts/proxy-service-name.d @@ -2,5 +2,6 @@ Long: proxy-service-name Arg: Help: SPNEGO proxy service name Added: 7.43.0 +Category: proxy tls --- This option allows you to change the service name for proxy negotiation. diff --git a/docs/cmdline-opts/proxy-ssl-allow-beast.d b/docs/cmdline-opts/proxy-ssl-allow-beast.d index de96b8436de929..138001b36ba5e7 100644 --- a/docs/cmdline-opts/proxy-ssl-allow-beast.d +++ b/docs/cmdline-opts/proxy-ssl-allow-beast.d @@ -1,5 +1,6 @@ Long: proxy-ssl-allow-beast Help: Allow security flaw for interop for HTTPS proxy Added: 7.52.0 +Category: proxy tls --- Same as --ssl-allow-beast but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-tls13-ciphers.d b/docs/cmdline-opts/proxy-tls13-ciphers.d index 3e35b07640c935..19add22606d531 100644 --- a/docs/cmdline-opts/proxy-tls13-ciphers.d +++ b/docs/cmdline-opts/proxy-tls13-ciphers.d @@ -2,6 +2,7 @@ Long: proxy-tls13-ciphers Arg: help: TLS 1.3 proxy cipher suites Protocols: TLS +Category: proxy tls --- Specifies which cipher suites to use in the connection to your HTTPS proxy when it negotiates TLS 1.3. The list of ciphers suites must specify valid @@ -9,4 +10,8 @@ ciphers. Read up on TLS 1.3 cipher suite details on this URL: https://curl.haxx.se/docs/ssl-ciphers.html +This option is currently used only when curl is built to use OpenSSL 1.1.1 or +later. If you are using a different SSL backend you can try setting TLS 1.3 +cipher suites by using the --proxy-ciphers option. + If this option is used several times, the last one will be used. diff --git a/docs/cmdline-opts/proxy-tlsauthtype.d b/docs/cmdline-opts/proxy-tlsauthtype.d index 7d0ce8e1a7cfbb..5649a0f5f97510 100644 --- a/docs/cmdline-opts/proxy-tlsauthtype.d +++ b/docs/cmdline-opts/proxy-tlsauthtype.d @@ -2,5 +2,6 @@ Long: proxy-tlsauthtype Arg: Help: TLS authentication type for HTTPS proxy Added: 7.52.0 +Category: proxy tls auth --- Same as --tlsauthtype but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-tlspassword.d b/docs/cmdline-opts/proxy-tlspassword.d index cf003844e0e78f..38a33d45d4ca50 100644 --- a/docs/cmdline-opts/proxy-tlspassword.d +++ b/docs/cmdline-opts/proxy-tlspassword.d @@ -2,5 +2,6 @@ Long: proxy-tlspassword Arg: Help: TLS password for HTTPS proxy Added: 7.52.0 +Category: proxy tls auth --- Same as --tlspassword but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-tlsuser.d b/docs/cmdline-opts/proxy-tlsuser.d index 758a7c953ab5b0..587f7f51068672 100644 --- a/docs/cmdline-opts/proxy-tlsuser.d +++ b/docs/cmdline-opts/proxy-tlsuser.d @@ -2,5 +2,6 @@ Long: proxy-tlsuser Arg: Help: TLS username for HTTPS proxy Added: 7.52.0 +Category: proxy tls auth --- Same as --tlsuser but used in HTTPS proxy context. diff --git a/docs/cmdline-opts/proxy-tlsv1.d b/docs/cmdline-opts/proxy-tlsv1.d index d024eeac36dc50..5003f5f707b2f9 100644 --- a/docs/cmdline-opts/proxy-tlsv1.d +++ b/docs/cmdline-opts/proxy-tlsv1.d @@ -1,5 +1,6 @@ Long: proxy-tlsv1 Help: Use TLSv1 for HTTPS proxy Added: 7.52.0 +Category: proxy tls auth --- Same as --tlsv1 but used in HTTPS proxy context. 
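A minimal sketch of the TLS 1.3 proxy cipher option in use (hypothetical proxy, and a curl built against OpenSSL 1.1.1 or later as noted above):

 curl --proxy https://proxy.example.com:3128 --proxy-tls13-ciphers TLS_AES_256_GCM_SHA384 "https://example.com/"

With other TLS backends, --proxy-ciphers is the option to try instead, as described above.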
diff --git a/docs/cmdline-opts/proxy-user.d b/docs/cmdline-opts/proxy-user.d index 152466daaa6477..82fc5170f4e7dd 100644 --- a/docs/cmdline-opts/proxy-user.d +++ b/docs/cmdline-opts/proxy-user.d @@ -2,6 +2,7 @@ Long: proxy-user Short: U Arg: Help: Proxy user and password +Category: proxy auth --- Specify the user name and password to use for proxy authentication. diff --git a/docs/cmdline-opts/proxy.d b/docs/cmdline-opts/proxy.d index 6506692be8844b..0592f13ba529ca 100644 --- a/docs/cmdline-opts/proxy.d +++ b/docs/cmdline-opts/proxy.d @@ -2,6 +2,7 @@ Long: proxy Short: x Arg: [protocol://]host[:port] Help: Use this proxy +Category: proxy --- Use the specified proxy. diff --git a/docs/cmdline-opts/proxy1.0.d b/docs/cmdline-opts/proxy1.0.d index 4a931bd15bfa95..b8a232bf74c1b1 100644 --- a/docs/cmdline-opts/proxy1.0.d +++ b/docs/cmdline-opts/proxy1.0.d @@ -1,6 +1,7 @@ Long: proxy1.0 Arg: Help: Use HTTP/1.0 proxy on given port +Category: proxy --- Use the specified HTTP 1.0 proxy. If the port number is not specified, it is assumed at port 1080. diff --git a/docs/cmdline-opts/proxytunnel.d b/docs/cmdline-opts/proxytunnel.d index 1f587f12095954..94dae992f02cfe 100644 --- a/docs/cmdline-opts/proxytunnel.d +++ b/docs/cmdline-opts/proxytunnel.d @@ -2,6 +2,7 @@ Long: proxytunnel Short: p Help: Operate through an HTTP proxy tunnel (using CONNECT) See-also: proxy +Category: proxy --- When an HTTP proxy is used --proxy, this option will make curl tunnel through the proxy. The tunnel approach is made with the HTTP proxy CONNECT request and diff --git a/docs/cmdline-opts/pubkey.d b/docs/cmdline-opts/pubkey.d index b2e11c024bc408..692daf6b0703a3 100644 --- a/docs/cmdline-opts/pubkey.d +++ b/docs/cmdline-opts/pubkey.d @@ -2,6 +2,7 @@ Long: pubkey Arg: Protocols: SFTP SCP Help: SSH Public key file name +Category: sftp scp auth --- Public key file name. Allows you to provide your public key in this separate file. diff --git a/docs/cmdline-opts/quote.d b/docs/cmdline-opts/quote.d index cdd3ca6bd02baa..a33ed3571d6e18 100644 --- a/docs/cmdline-opts/quote.d +++ b/docs/cmdline-opts/quote.d @@ -2,6 +2,7 @@ Long: quote Short: Q Help: Send command(s) to server before transfer Protocols: FTP SFTP +Category: ftp sftp --- Send an arbitrary command to the remote FTP or SFTP server. Quote commands are @@ -16,15 +17,20 @@ If the server returns failure for one of the commands, the entire operation will be aborted. You must send syntactically correct FTP commands as RFC 959 defines to FTP servers, or one of the commands listed below to SFTP servers. -This option can be used multiple times. When speaking to an FTP server, prefix -the command with an asterisk (*) to make curl continue even if the command -fails as by default curl will stop at first failure. +Prefix the command with an asterisk (*) to make curl continue even if the +command fails as by default curl will stop at first failure. + +This option can be used multiple times. SFTP is a binary protocol. Unlike for FTP, curl interprets SFTP quote commands itself before sending them to the server. File names may be quoted shell-style to embed spaces or special characters. Following is the list of all supported SFTP quote commands: .RS +.IP "atime date file" +The atime command sets the last access time of the file named by the file +operand. The can be all sorts of date strings, see the +\fIcurl_getdate(3)\fP man page for date expression details. 
(Added in 7.73.0) .IP "chgrp group file" The chgrp command sets the group ID of the file named by the file operand to the group ID specified by the group operand. The group operand is a decimal @@ -41,6 +47,10 @@ The ln and symlink commands create a symbolic link at the target_file location pointing to the source_file location. .IP "mkdir directory_name" The mkdir command creates the directory named by the directory_name operand. +.IP "mtime date file" +The mtime command sets the last modification time of the file named by the +file operand. The can be all sorts of date strings, see the +\fIcurl_getdate(3)\fP man page for date expression details. (Added in 7.73.0) .IP "pwd" The pwd command returns the absolute pathname of the current working directory. .IP "rename source target" diff --git a/docs/cmdline-opts/random-file.d b/docs/cmdline-opts/random-file.d index 51626f88dc08a5..c57f52364e549c 100644 --- a/docs/cmdline-opts/random-file.d +++ b/docs/cmdline-opts/random-file.d @@ -1,6 +1,7 @@ Long: random-file Arg: Help: File for reading random data from +Category: misc --- Specify the path name to file containing what will be considered as random data. The data may be used to seed the random engine for SSL connections. See diff --git a/docs/cmdline-opts/range.d b/docs/cmdline-opts/range.d index b888dd1814fe5b..17c6c2aba58c6e 100644 --- a/docs/cmdline-opts/range.d +++ b/docs/cmdline-opts/range.d @@ -3,6 +3,7 @@ Short: r Help: Retrieve only the bytes within RANGE Arg: Protocols: HTTP FTP SFTP FILE +Category: http ftp sftp file --- Retrieve a byte range (i.e. a partial document) from an HTTP/1.1, FTP or SFTP server or a local FILE. Ranges can be specified in a number of ways. diff --git a/docs/cmdline-opts/raw.d b/docs/cmdline-opts/raw.d index c3328e69a1a22c..90e777263bd7fe 100644 --- a/docs/cmdline-opts/raw.d +++ b/docs/cmdline-opts/raw.d @@ -2,6 +2,7 @@ Long: raw Help: Do HTTP "raw"; no transfer decoding Added: 7.16.2 Protocols: HTTP +Category: http --- When used, it disables all internal HTTP decoding of content or transfer encodings and instead makes them passed on unaltered, raw. diff --git a/docs/cmdline-opts/referer.d b/docs/cmdline-opts/referer.d index cd84e9d5a027df..8b2057b9400b6a 100644 --- a/docs/cmdline-opts/referer.d +++ b/docs/cmdline-opts/referer.d @@ -4,6 +4,7 @@ Arg: Protocols: HTTP Help: Referrer URL See-also: user-agent header +Category: http --- Sends the "Referrer Page" information to the HTTP server. This can also be set with the --header flag of course. When used with --location you can append diff --git a/docs/cmdline-opts/remote-header-name.d b/docs/cmdline-opts/remote-header-name.d index 771b6d46996ddb..700da69fc0ebb7 100644 --- a/docs/cmdline-opts/remote-header-name.d +++ b/docs/cmdline-opts/remote-header-name.d @@ -2,6 +2,7 @@ Long: remote-header-name Short: J Protocols: HTTP Help: Use the header-provided filename +Category: output --- This option tells the --remote-name option to use the server-specified Content-Disposition filename instead of extracting a filename from the URL. diff --git a/docs/cmdline-opts/remote-name-all.d b/docs/cmdline-opts/remote-name-all.d index f7a1996793bd8b..299684c51d93fe 100644 --- a/docs/cmdline-opts/remote-name-all.d +++ b/docs/cmdline-opts/remote-name-all.d @@ -1,6 +1,7 @@ Long: remote-name-all Help: Use the remote file name for all URLs Added: 7.19.0 +Category: output --- This option changes the default action for all given URLs to be dealt with as if --remote-name were used for each one. 
So if you want to disable that for a diff --git a/docs/cmdline-opts/remote-name.d b/docs/cmdline-opts/remote-name.d index 9fed64bf4fe8d2..184c32e4769e1d 100644 --- a/docs/cmdline-opts/remote-name.d +++ b/docs/cmdline-opts/remote-name.d @@ -1,6 +1,7 @@ Long: remote-name Short: O Help: Write output to a file named as the remote file +Category: important output --- Write output to a local file named like the remote file we get. (Only the file part of the remote file is used, the path is cut off.) diff --git a/docs/cmdline-opts/remote-time.d b/docs/cmdline-opts/remote-time.d index 7f6809dc35e2b1..96fb4fbe99837b 100644 --- a/docs/cmdline-opts/remote-time.d +++ b/docs/cmdline-opts/remote-time.d @@ -1,6 +1,7 @@ Long: remote-time Short: R Help: Set the remote file's time on the local output +Category: output --- When used, this will make curl attempt to figure out the timestamp of the remote file, and if that is available make the local file get that same diff --git a/docs/cmdline-opts/request-target.d b/docs/cmdline-opts/request-target.d index b46b4af02e934e..df423f87d7f2ad 100644 --- a/docs/cmdline-opts/request-target.d +++ b/docs/cmdline-opts/request-target.d @@ -2,6 +2,7 @@ Long: request-target Help: Specify the target for this request Protocols: HTTP Added: 7.55.0 +Category: http --- Tells curl to use an alternative "target" (path) instead of using the path as provided in the URL. Particularly useful when wanting to issue HTTP requests diff --git a/docs/cmdline-opts/request.d b/docs/cmdline-opts/request.d index 3919d426a5120c..47c4c8b049afa3 100644 --- a/docs/cmdline-opts/request.d +++ b/docs/cmdline-opts/request.d @@ -2,6 +2,7 @@ Long: request Short: X Arg: Help: Specify request command to use +Category: connection --- (HTTP) Specifies a custom request method to use when communicating with the HTTP server. The specified request method will be used instead of the method diff --git a/docs/cmdline-opts/resolve.d b/docs/cmdline-opts/resolve.d index 9c37525bd96cf1..41f6a1bd57496c 100644 --- a/docs/cmdline-opts/resolve.d +++ b/docs/cmdline-opts/resolve.d @@ -1,7 +1,8 @@ Long: resolve -Arg: +Arg: Help: Resolve the host+port to this address Added: 7.21.3 +Category: connection --- Provide a custom address for a specific host and port pair. Using this, you can make the curl requests(s) use a specified address and prevent the diff --git a/docs/cmdline-opts/retry-all-errors.d b/docs/cmdline-opts/retry-all-errors.d new file mode 100644 index 00000000000000..6bda52b6e103b6 --- /dev/null +++ b/docs/cmdline-opts/retry-all-errors.d @@ -0,0 +1,20 @@ +Long: retry-all-errors +Help: Retry all errors (use with --retry) +Added: 7.71.0 +Category: curl +--- +Retry on any error. This option is used together with --retry. + +This option is the "sledgehammer" of retrying. Do not use this option by +default (eg in curlrc), there may be unintended consequences such as sending or +receiving duplicate data. Do not use with redirected input or output. You'd be +much better off handling your unique problems in shell script. Please read the +example below. + +Warning: For server compatibility curl attempts to retry failed flaky transfers +as close as possible to how they were started, but this is not possible with +redirected input or output. For example, before retrying it removes output data +from a failed partial transfer that was written to an output file. However this +is not true of data redirected to a | pipe or > file, which are not reset. 
We +strongly suggest you do not parse or record output via redirect in combination +with this option, since you may receive duplicate data. diff --git a/docs/cmdline-opts/retry-connrefused.d b/docs/cmdline-opts/retry-connrefused.d index 6a78e1fdaa59c6..be34f973d5ca13 100644 --- a/docs/cmdline-opts/retry-connrefused.d +++ b/docs/cmdline-opts/retry-connrefused.d @@ -1,6 +1,7 @@ Long: retry-connrefused Help: Retry on connection refused (use with --retry) Added: 7.52.0 +Category: curl --- In addition to the other conditions, consider ECONNREFUSED as a transient error too for --retry. This option is used together with --retry. diff --git a/docs/cmdline-opts/retry-delay.d b/docs/cmdline-opts/retry-delay.d index 1691356d4c0b3c..5645c0e6a56545 100644 --- a/docs/cmdline-opts/retry-delay.d +++ b/docs/cmdline-opts/retry-delay.d @@ -2,6 +2,7 @@ Long: retry-delay Arg: Help: Wait time between retries Added: 7.12.3 +Category: curl --- Make curl sleep this amount of time before each retry when a transfer has failed with a transient error (it changes the default backoff time algorithm diff --git a/docs/cmdline-opts/retry-max-time.d b/docs/cmdline-opts/retry-max-time.d index 0920c924465160..9d6faf3041fff5 100644 --- a/docs/cmdline-opts/retry-max-time.d +++ b/docs/cmdline-opts/retry-max-time.d @@ -2,6 +2,7 @@ Long: retry-max-time Arg: Help: Retry only within this period Added: 7.12.3 +Category: curl --- The retry timer is reset before the first transfer attempt. Retries will be done as usual (see --retry) as long as the timer hasn't reached this given diff --git a/docs/cmdline-opts/retry.d b/docs/cmdline-opts/retry.d index 32d1c799bfadf0..70f6e7bfd4d904 100644 --- a/docs/cmdline-opts/retry.d +++ b/docs/cmdline-opts/retry.d @@ -2,6 +2,7 @@ Long: retry Arg: Added: 7.12.3 Help: Retry request if transient problems occur +Category: curl --- If a transient error is returned when curl tries to perform a transfer, it will retry this number of times before giving up. Setting the number to 0 @@ -14,4 +15,7 @@ for all forthcoming retries it will double the waiting time until it reaches using --retry-delay you disable this exponential backoff algorithm. See also --retry-max-time to limit the total time allowed for retries. +Since curl 7.66.0, curl will comply with the Retry-After: response header if +one is present to know when to issue the next retry. + If this option is used several times, the last one will be used. diff --git a/docs/cmdline-opts/sasl-authzid.d b/docs/cmdline-opts/sasl-authzid.d new file mode 100644 index 00000000000000..e802c80a16dfbc --- /dev/null +++ b/docs/cmdline-opts/sasl-authzid.d @@ -0,0 +1,13 @@ +Long: sasl-authzid +Arg: +Help: Identity for SASL PLAIN authentication +Added: 7.66.0 +Category: auth +--- +Use this authorisation identity (authzid), during SASL PLAIN authentication, +in addition to the authentication identity (authcid) as specified by --user. + +If the option isn't specified, the server will derive the authzid from the +authcid, but if specified, and depending on the server implementation, it may +be used to access another user's inbox, that the user has been granted access +to, or a shared mailbox for example. diff --git a/docs/cmdline-opts/sasl-ir.d b/docs/cmdline-opts/sasl-ir.d index c0dab946357538..0041c3b4e7797a 100644 --- a/docs/cmdline-opts/sasl-ir.d +++ b/docs/cmdline-opts/sasl-ir.d @@ -1,5 +1,6 @@ Long: sasl-ir Help: Enable initial response in SASL authentication Added: 7.31.0 +Category: auth --- Enable initial response in SASL authentication.
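Putting the retry options above together, an illustrative combination (the URL and output file name are placeholders) could look like:

 curl --retry 5 --retry-delay 2 --retry-connrefused --retry-all-errors -o output.bin "https://example.com/flaky"

This retries up to five times, waits two seconds between attempts instead of using the exponential backoff, treats ECONNREFUSED as transient, and retries on any error; per the warning above, write to a plain output file rather than a pipe so partial data can be removed before a retry.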
diff --git a/docs/cmdline-opts/service-name.d b/docs/cmdline-opts/service-name.d index 4dfeb27d65d5f1..c64496b85856e9 100644 --- a/docs/cmdline-opts/service-name.d +++ b/docs/cmdline-opts/service-name.d @@ -2,6 +2,7 @@ Long: service-name Help: SPNEGO service name Arg: Added: 7.43.0 +Category: misc --- This option allows you to change the service name for SPNEGO. diff --git a/docs/cmdline-opts/show-error.d b/docs/cmdline-opts/show-error.d index b9667a4ca581d5..2124409e5669c4 100644 --- a/docs/cmdline-opts/show-error.d +++ b/docs/cmdline-opts/show-error.d @@ -1,5 +1,7 @@ Long: show-error Short: S Help: Show error even when -s is used +See-also: no-progress-meter +Category: curl --- When used with --silent, it makes curl show an error message if it fails. diff --git a/docs/cmdline-opts/silent.d b/docs/cmdline-opts/silent.d index b0b4425b3d1672..58a522335efc62 100644 --- a/docs/cmdline-opts/silent.d +++ b/docs/cmdline-opts/silent.d @@ -1,7 +1,8 @@ Long: silent Short: s Help: Silent mode -See-also: verbose stderr +See-also: verbose stderr no-progress-meter +Category: important verbose --- Silent or quiet mode. Don't show progress meter or error messages. Makes Curl mute. It will still output the data you ask for, potentially even to the diff --git a/docs/cmdline-opts/socks4.d b/docs/cmdline-opts/socks4.d index 11f6ae033e954d..bc095d5460b5e1 100644 --- a/docs/cmdline-opts/socks4.d +++ b/docs/cmdline-opts/socks4.d @@ -2,6 +2,7 @@ Long: socks4 Arg: Help: SOCKS4 proxy on given host + port Added: 7.15.2 +Category: proxy --- Use the specified SOCKS4 proxy. If the port number is not specified, it is assumed at port 1080. diff --git a/docs/cmdline-opts/socks4a.d b/docs/cmdline-opts/socks4a.d index ae254ae0e82c9b..d3177a9d77540f 100644 --- a/docs/cmdline-opts/socks4a.d +++ b/docs/cmdline-opts/socks4a.d @@ -2,6 +2,7 @@ Long: socks4a Arg: Help: SOCKS4a proxy on given host + port Added: 7.18.0 +Category: proxy --- Use the specified SOCKS4a proxy. If the port number is not specified, it is assumed at port 1080. diff --git a/docs/cmdline-opts/socks5-basic.d b/docs/cmdline-opts/socks5-basic.d index 67d16b3a66b758..52d1e66194e771 100644 --- a/docs/cmdline-opts/socks5-basic.d +++ b/docs/cmdline-opts/socks5-basic.d @@ -1,6 +1,7 @@ Long: socks5-basic Help: Enable username/password auth for SOCKS5 proxies Added: 7.55.0 +Category: proxy auth --- Tells curl to use username/password authentication when connecting to a SOCKS5 proxy. The username/password authentication is enabled by default. Use diff --git a/docs/cmdline-opts/socks5-gssapi-nec.d b/docs/cmdline-opts/socks5-gssapi-nec.d index 477e218e399ad3..b538f33f362c43 100644 --- a/docs/cmdline-opts/socks5-gssapi-nec.d +++ b/docs/cmdline-opts/socks5-gssapi-nec.d @@ -1,6 +1,7 @@ Long: socks5-gssapi-nec Help: Compatibility with NEC SOCKS5 server Added: 7.19.4 +Category: proxy auth --- As part of the GSS-API negotiation a protection mode is negotiated. RFC 1961 says in section 4.3/4.4 it should be protected, but the NEC reference diff --git a/docs/cmdline-opts/socks5-gssapi-service.d b/docs/cmdline-opts/socks5-gssapi-service.d index eb3b2407b334e1..e61d0f5ab31f0f 100644 --- a/docs/cmdline-opts/socks5-gssapi-service.d +++ b/docs/cmdline-opts/socks5-gssapi-service.d @@ -2,6 +2,7 @@ Long: socks5-gssapi-service Arg: Help: SOCKS5 proxy service name for GSS-API Added: 7.19.4 +Category: proxy auth --- The default service name for a socks server is rcmd/server-fqdn. This option allows you to change it. 
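For the SOCKS proxy options in this area, an illustrative invocation (assuming a SOCKS proxy listening locally, for example an ssh -D dynamic forward):

 curl --socks4a localhost:1080 "https://example.com/"

Here the proxy, rather than curl, resolves the target host name; --socks4 and --socks5 instead resolve locally, while --socks5-hostname hands resolution to the proxy over SOCKS5.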
diff --git a/docs/cmdline-opts/socks5-gssapi.d b/docs/cmdline-opts/socks5-gssapi.d index 0070f37eb237e8..72ae7aeb2b7e11 100644 --- a/docs/cmdline-opts/socks5-gssapi.d +++ b/docs/cmdline-opts/socks5-gssapi.d @@ -1,6 +1,7 @@ Long: socks5-gssapi Help: Enable GSS-API auth for SOCKS5 proxies Added: 7.55.0 +Category: proxy auth --- Tells curl to use GSS-API authentication when connecting to a SOCKS5 proxy. The GSS-API authentication is enabled by default (if curl is compiled with diff --git a/docs/cmdline-opts/socks5-hostname.d b/docs/cmdline-opts/socks5-hostname.d index 9d9d946e5747b2..247d7660e7f950 100644 --- a/docs/cmdline-opts/socks5-hostname.d +++ b/docs/cmdline-opts/socks5-hostname.d @@ -2,6 +2,7 @@ Long: socks5-hostname Arg: Help: SOCKS5 proxy, pass host name to proxy Added: 7.18.0 +Category: proxy --- Use the specified SOCKS5 proxy (and let the proxy resolve the host name). If the port number is not specified, it is assumed at port 1080. diff --git a/docs/cmdline-opts/socks5.d b/docs/cmdline-opts/socks5.d index 22fae76295c994..bbe92f0a10431f 100644 --- a/docs/cmdline-opts/socks5.d +++ b/docs/cmdline-opts/socks5.d @@ -2,6 +2,7 @@ Long: socks5 Arg: Help: SOCKS5 proxy on given host + port Added: 7.18.0 +Category: proxy --- Use the specified SOCKS5 proxy - but resolve the host name locally. If the port number is not specified, it is assumed at port 1080. diff --git a/docs/cmdline-opts/speed-limit.d b/docs/cmdline-opts/speed-limit.d index e2b81c79a33472..3f9ad3a46e6f4e 100644 --- a/docs/cmdline-opts/speed-limit.d +++ b/docs/cmdline-opts/speed-limit.d @@ -2,6 +2,7 @@ Long: speed-limit Short: Y Arg: Help: Stop transfers slower than this +Category: connection --- If a download is slower than this given speed (in bytes per second) for speed-time seconds it gets aborted. speed-time is set with --speed-time and is diff --git a/docs/cmdline-opts/speed-time.d b/docs/cmdline-opts/speed-time.d index 98d6ae13c5fc3a..81acabec46a0b9 100644 --- a/docs/cmdline-opts/speed-time.d +++ b/docs/cmdline-opts/speed-time.d @@ -2,6 +2,7 @@ Long: speed-time Short: y Arg: Help: Trigger 'speed-limit' abort after this time +Category: connection --- If a download is slower than speed-limit bytes per second during a speed-time period, the download gets aborted. If speed-time is used, the default diff --git a/docs/cmdline-opts/ssl-allow-beast.d b/docs/cmdline-opts/ssl-allow-beast.d index 973fcd4518828e..f18fe74704878a 100644 --- a/docs/cmdline-opts/ssl-allow-beast.d +++ b/docs/cmdline-opts/ssl-allow-beast.d @@ -1,6 +1,7 @@ Long: ssl-allow-beast Help: Allow security flaw to improve interop Added: 7.25.0 +Category: tls --- This option tells curl to not work around a security flaw in the SSL3 and TLS1.0 protocols known as BEAST. If this option isn't used, the SSL layer may diff --git a/docs/cmdline-opts/ssl-no-revoke.d b/docs/cmdline-opts/ssl-no-revoke.d index f94b111436f5e3..3b1614243e24c4 100644 --- a/docs/cmdline-opts/ssl-no-revoke.d +++ b/docs/cmdline-opts/ssl-no-revoke.d @@ -1,6 +1,7 @@ Long: ssl-no-revoke Help: Disable cert revocation checks (Schannel) Added: 7.44.0 +Category: tls --- (Schannel) This option tells curl to disable certificate revocation checks. 
WARNING: this option loosens the SSL security, and by using this flag you ask diff --git a/docs/cmdline-opts/ssl-reqd.d b/docs/cmdline-opts/ssl-reqd.d index 3c6f8a257b9dd1..2e573e39d1220a 100644 --- a/docs/cmdline-opts/ssl-reqd.d +++ b/docs/cmdline-opts/ssl-reqd.d @@ -2,6 +2,7 @@ Long: ssl-reqd Help: Require SSL/TLS Protocols: FTP IMAP POP3 SMTP Added: 7.20.0 +Category: tls --- Require SSL/TLS for the connection. Terminates the connection if the server doesn't support SSL/TLS. diff --git a/docs/cmdline-opts/ssl-revoke-best-effort.d b/docs/cmdline-opts/ssl-revoke-best-effort.d new file mode 100644 index 00000000000000..af22da46115dac --- /dev/null +++ b/docs/cmdline-opts/ssl-revoke-best-effort.d @@ -0,0 +1,8 @@ +Long: ssl-revoke-best-effort +Help: Ignore missing/offline cert CRL dist points +Added: 7.70.0 +Category: tls +--- +(Schannel) This option tells curl to ignore certificate revocation checks when +they failed due to missing/offline distribution points for the revocation check +lists. diff --git a/docs/cmdline-opts/ssl.d b/docs/cmdline-opts/ssl.d index dabd83761f9467..8df460106b2c22 100644 --- a/docs/cmdline-opts/ssl.d +++ b/docs/cmdline-opts/ssl.d @@ -2,6 +2,7 @@ Long: ssl Help: Try SSL/TLS Protocols: FTP IMAP POP3 SMTP Added: 7.20.0 +Category: tls --- Try to use SSL/TLS for the connection. Reverts to a non-secure connection if diff --git a/docs/cmdline-opts/sslv2.d b/docs/cmdline-opts/sslv2.d index 67d2b8506536c9..773ab691ef70ea 100644 --- a/docs/cmdline-opts/sslv2.d +++ b/docs/cmdline-opts/sslv2.d @@ -7,6 +7,7 @@ Mutexed: sslv3 tlsv1 tlsv1.1 tlsv1.2 Requires: TLS See-also: http1.1 http2 Help: Use SSLv2 +Category: tls --- Forces curl to use SSL version 2 when negotiating with a remote SSL server. Sometimes curl is built without SSLv2 support. SSLv2 is widely diff --git a/docs/cmdline-opts/sslv3.d b/docs/cmdline-opts/sslv3.d index 101ad1004710d9..7beed8f81e3eb6 100644 --- a/docs/cmdline-opts/sslv3.d +++ b/docs/cmdline-opts/sslv3.d @@ -7,6 +7,7 @@ Mutexed: sslv2 tlsv1 tlsv1.1 tlsv1.2 Requires: TLS See-also: http1.1 http2 Help: Use SSLv3 +Category: tls --- Forces curl to use SSL version 3 when negotiating with a remote SSL server. Sometimes curl is built without SSLv3 support. SSLv3 is widely diff --git a/docs/cmdline-opts/stderr.d b/docs/cmdline-opts/stderr.d index e8cf7ba68fc33a..6da0401592c0c0 100644 --- a/docs/cmdline-opts/stderr.d +++ b/docs/cmdline-opts/stderr.d @@ -1,6 +1,7 @@ Long: stderr Help: Where to redirect stderr See-also: verbose silent +Category: verbose --- Redirect all writes to stderr to the specified file instead. If the file name is a plain '-', it is instead written to stdout. diff --git a/docs/cmdline-opts/styled-output.d b/docs/cmdline-opts/styled-output.d index e4751aecb1c868..8aa4a0f05efb82 100644 --- a/docs/cmdline-opts/styled-output.d +++ b/docs/cmdline-opts/styled-output.d @@ -1,6 +1,7 @@ Long: styled-output Help: Enable styled output for HTTP headers Added: 7.61.0 +Category: verbose --- Enables the automatic use of bold font styles when writing HTTP headers to the terminal. Use --no-styled-output to switch them off. 
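As an example of the difference between --ssl and --ssl-reqd on a protocol that can run in cleartext (the FTP host is hypothetical):

 curl --ssl-reqd "ftp://ftp.example.com/README"

With --ssl-reqd the transfer fails if the server cannot upgrade to TLS, whereas plain --ssl would fall back to an unencrypted connection.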
diff --git a/docs/cmdline-opts/suppress-connect-headers.d b/docs/cmdline-opts/suppress-connect-headers.d index d208b891770513..b4e2a17db12d5e 100644 --- a/docs/cmdline-opts/suppress-connect-headers.d +++ b/docs/cmdline-opts/suppress-connect-headers.d @@ -1,6 +1,7 @@ Long: suppress-connect-headers Help: Suppress proxy CONNECT response headers See-also: dump-header include proxytunnel +Category: proxy --- When --proxytunnel is used and a CONNECT request is made don't output proxy CONNECT response headers. This option is meant to be used with --dump-header or diff --git a/docs/cmdline-opts/tcp-fastopen.d b/docs/cmdline-opts/tcp-fastopen.d index 08e141df783e7e..faef499f3e5ad7 100644 --- a/docs/cmdline-opts/tcp-fastopen.d +++ b/docs/cmdline-opts/tcp-fastopen.d @@ -1,5 +1,6 @@ Long: tcp-fastopen Added: 7.49.0 Help: Use TCP Fast Open +Category: connection --- Enable use of TCP Fast Open (RFC7413). diff --git a/docs/cmdline-opts/tcp-nodelay.d b/docs/cmdline-opts/tcp-nodelay.d index f047a7c6faaaa1..f1cf644a430c28 100644 --- a/docs/cmdline-opts/tcp-nodelay.d +++ b/docs/cmdline-opts/tcp-nodelay.d @@ -1,6 +1,7 @@ Long: tcp-nodelay Help: Use the TCP_NODELAY option Added: 7.11.2 +Category: connection --- Turn on the TCP_NODELAY option. See the \fIcurl_easy_setopt(3)\fP man page for details about this option. diff --git a/docs/cmdline-opts/telnet-option.d b/docs/cmdline-opts/telnet-option.d index a67cb627b85b37..789de3ea1f4d81 100644 --- a/docs/cmdline-opts/telnet-option.d +++ b/docs/cmdline-opts/telnet-option.d @@ -2,6 +2,7 @@ Long: telnet-option Short: t Arg: Help: Set telnet option +Category: telnet --- Pass options to the telnet protocol. Supported options are: diff --git a/docs/cmdline-opts/tftp-blksize.d b/docs/cmdline-opts/tftp-blksize.d index c184328de004a2..6e67ed2e11f06e 100644 --- a/docs/cmdline-opts/tftp-blksize.d +++ b/docs/cmdline-opts/tftp-blksize.d @@ -3,6 +3,7 @@ Arg: Help: Set TFTP BLKSIZE option Protocols: TFTP Added: 7.20.0 +Category: tftp --- Set TFTP BLKSIZE option (must be >512). This is the block size that curl will try to use when transferring data to or from a TFTP server. By default 512 diff --git a/docs/cmdline-opts/tftp-no-options.d b/docs/cmdline-opts/tftp-no-options.d index e2a4dacd5262f4..023327301fe8ad 100644 --- a/docs/cmdline-opts/tftp-no-options.d +++ b/docs/cmdline-opts/tftp-no-options.d @@ -2,6 +2,7 @@ Long: tftp-no-options Help: Do not send any TFTP options Protocols: TFTP Added: 7.48.0 +Category: tftp --- Tells curl not to send TFTP options requests. diff --git a/docs/cmdline-opts/time-cond.d b/docs/cmdline-opts/time-cond.d index 830b4e1a24ce44..f733eeb0bfa770 100644 --- a/docs/cmdline-opts/time-cond.d +++ b/docs/cmdline-opts/time-cond.d @@ -3,6 +3,7 @@ Short: z Arg: