Mirror of https://github.com/cmclark00/retro-imager.git, synced 2025-05-18 16:05:21 +01:00
deps: Add modified curl-8.8.0
Add a modified version of curl-8.8.0. The modifications are primarily to the CMake infrastructure, so that the library is easier to consume as a vendored dependency.
parent: b1043a3601
commit: 593abb7981
3823 changed files with 671347 additions and 0 deletions
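For orientation, a vendored curl tree of this kind is normally pulled into the parent build with add_subdirectory. The following is only a minimal sketch of what a consuming CMakeLists.txt might look like; the cache options and the my_app target name are assumptions based on stock curl's CMake build and are not taken from this commit.

    # Sketch only: how a parent project might consume this vendored curl tree.
    # The option names follow stock curl's CMake build and my_app is a
    # placeholder target; neither is taken from this commit.
    set(BUILD_CURL_EXE OFF CACHE BOOL "" FORCE)      # build only libcurl, not the curl tool
    set(BUILD_TESTING OFF CACHE BOOL "" FORCE)       # skip curl's test targets
    set(BUILD_SHARED_LIBS OFF CACHE BOOL "" FORCE)   # prefer a static libcurl

    add_subdirectory(src/dependencies/curl-8.8.0 EXCLUDE_FROM_ALL)

    target_link_libraries(my_app PRIVATE libcurl)    # libcurl is curl's library target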
Changed files (partial listing):

src/dependencies/curl-8.8.0/CHANGES (new file, 10709 lines): diff suppressed because it is too large.
src/dependencies/curl-8.8.0/CMake/CMakeConfigurableFile.in (new file, 24 lines): the standard curl license banner followed by a @CMAKE_CONFIGURABLE_FILE_CONTENT@ placeholder.
src/dependencies/curl-8.8.0/CMake/CurlSymbolHiding.cmake (new file, 84 lines): defines the CURL_HIDDEN_SYMBOLS option and probes whether the compiler supports symbol hiding (GCC and Clang via -fvisibility=hidden, SunPro via -xldscope=hidden, Intel with an icc bug check, MSVC via CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS), then exports SUPPORTS_SYMBOL_HIDING, HIDES_CURL_PRIVATE_SYMBOLS, CURL_CFLAG_SYMBOLS_HIDE and CURL_EXTERN_SYMBOL for the rest of the build.
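To show how the variables it exports are typically consumed further down the build, a minimal assumed sketch (not curl's own CMakeLists; curl_objects is a placeholder target):

    # Assumed downstream use of the symbol-hiding results; placeholder target name.
    if(CURL_HIDDEN_SYMBOLS AND SUPPORTS_SYMBOL_HIDING)
      target_compile_options(curl_objects PRIVATE ${CURL_CFLAG_SYMBOLS_HIDE})  # e.g. -fvisibility=hidden
    endif()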
src/dependencies/curl-8.8.0/CMake/CurlTests.c (new file, 430 lines): a collection of small feature-test programs, each guarded by a HAVE_* macro, used by the CMake configure checks: O_NONBLOCK via fcntl, the gethostbyname_r variants, in_addr_t, bool, _FILE_OFFSET_BITS=64 large-file support, ioctlsocket/IoctlSocket/ioctl with FIONBIO, SIOCGIFADDR, setsockopt with SO_NONBLOCK, the GNU and POSIX strerror_r flavours, fsetxattr with 5 or 6 arguments, clock_gettime(CLOCK_MONOTONIC), __builtin_available, C11 _Atomic, and the effective _WIN32_WINNT value.
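Each of those programs is selected by defining its HAVE_* macro when the file is compiled, so a single source file can back many independent probes. An assumed illustration of how such a probe is typically driven from CMake (curl's own check macros may differ in detail):

    # Assumed illustration of driving one CurlTests.c probe; not curl's exact wiring.
    try_compile(HAVE_CLOCK_GETTIME_MONOTONIC
      ${CMAKE_BINARY_DIR}
      ${CMAKE_CURRENT_SOURCE_DIR}/CMake/CurlTests.c
      COMPILE_DEFINITIONS -DHAVE_CLOCK_GETTIME_MONOTONIC)
    # The result variable is TRUE only when the guarded test program compiles.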
src/dependencies/curl-8.8.0/CMake/FindBearSSL.cmake (new file, 32 lines): locates bearssl.h and the bearssl library and reports BEARSSL_INCLUDE_DIRS and BEARSSL_LIBRARY through find_package_handle_standard_args.
src/dependencies/curl-8.8.0/CMake/FindBrotli.cmake (new file, 43 lines): locates brotli/decode.h and the brotlicommon and brotlidec libraries, setting BROTLI_FOUND, BROTLI_INCLUDE_DIRS and BROTLI_LIBRARIES.
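These bundled Find modules become visible to find_package once their directory is on CMAKE_MODULE_PATH. An assumed usage sketch for the Brotli module, with my_app again as a placeholder target:

    # Assumed usage of the bundled find-modules; path and target are placeholders.
    list(APPEND CMAKE_MODULE_PATH
         "${CMAKE_CURRENT_SOURCE_DIR}/src/dependencies/curl-8.8.0/CMake")
    find_package(Brotli)
    if(BROTLI_FOUND)
      target_include_directories(my_app PRIVATE ${BROTLI_INCLUDE_DIRS})
      target_link_libraries(my_app PRIVATE ${BROTLI_LIBRARIES})
    endif()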
src/dependencies/curl-8.8.0/CMake/FindCARES.cmake (new file, 47 lines): locates ares.h and the cares library, defining CARES_INCLUDE_DIR, CARES_LIBRARY and CARES_FOUND.
src/dependencies/curl-8.8.0/CMake/FindGSS.cmake (new file, 312 lines): locates a GSS-API Kerberos implementation, first via pkg-config (mit-krb5-gssapi or heimdal-gssapi), then via a krb5-config script, then by searching headers and libraries directly; it distinguishes the MIT and Heimdal flavours and exports GSS_INCLUDE_DIR, GSS_LIBRARIES, GSS_LINK_DIRECTORIES, GSS_LINKER_FLAGS, GSS_COMPILER_FLAGS, GSS_VERSION and GSS_FLAVOUR.
src/dependencies/curl-8.8.0/CMake/FindLibPSL.cmake (new file, 45 lines): locates libpsl.h and the psl library and reads PSL_VERSION out of the header.
src/dependencies/curl-8.8.0/CMake/FindLibSSH2.cmake (new file, 45 lines): locates libssh2.h and the ssh2 library and reads LIBSSH2_VERSION out of the header.
src/dependencies/curl-8.8.0/CMake/FindMSH3.cmake (new file, 70 lines): locates msh3.h and the msh3 library, with pkg-config hints on UNIX, and sets MSH3_INCLUDE_DIRS and MSH3_LIBRARIES.
src/dependencies/curl-8.8.0/CMake/FindMbedTLS.cmake (new file, 36 lines): locates mbedtls/ssl.h and the mbedtls, mbedx509 and mbedcrypto libraries and combines them into MBEDTLS_LIBRARIES.
src/dependencies/curl-8.8.0/CMake/FindNGHTTP2.cmake (new file, 41 lines): locates nghttp2/nghttp2.h and the nghttp2 (or nghttp2_static) library, setting NGHTTP2_INCLUDE_DIRS and NGHTTP2_LIBRARIES.
src/dependencies/curl-8.8.0/CMake/FindNGHTTP3.cmake (new file, 78 lines): locates nghttp3/nghttp3.h and the nghttp3 library, with pkg-config hints, and sets NGHTTP3_INCLUDE_DIRS, NGHTTP3_LIBRARIES and NGHTTP3_VERSION.
src/dependencies/curl-8.8.0/CMake/FindNGTCP2.cmake (new file, 117 lines): locates the ngtcp2 library and, via optional, mutually exclusive COMPONENTS (quictls/LibreSSL, BoringSSL/AWS-LC, wolfSSL, GnuTLS), the matching ngtcp2_crypto_* backend library, setting NGTCP2_INCLUDE_DIRS, NGTCP2_LIBRARIES and NGTCP2_VERSION.
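As that module's documentation block notes, the crypto backend is chosen through a find_package component; an assumed call site:

    # Assumed example of selecting the quictls crypto backend for ngtcp2.
    find_package(NGTCP2 COMPONENTS quictls)
    # On success, NGTCP2_LIBRARIES contains both ngtcp2 and the matching
    # ngtcp2_crypto_quictls library located by the module.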
src/dependencies/curl-8.8.0/CMake/FindQUICHE.cmake (new file, 70 lines): locates quiche.h and the quiche library, with pkg-config hints, and sets QUICHE_INCLUDE_DIRS and QUICHE_LIBRARIES.
36
src/dependencies/curl-8.8.0/CMake/FindWolfSSL.cmake
Normal file

@@ -0,0 +1,36 @@
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                              \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
find_path(WolfSSL_INCLUDE_DIR NAMES wolfssl/ssl.h)
find_library(WolfSSL_LIBRARY NAMES wolfssl)
mark_as_advanced(WolfSSL_INCLUDE_DIR WolfSSL_LIBRARY)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(WolfSSL
  REQUIRED_VARS WolfSSL_INCLUDE_DIR WolfSSL_LIBRARY
)

if(WolfSSL_FOUND)
  set(WolfSSL_INCLUDE_DIRS ${WolfSSL_INCLUDE_DIR})
  set(WolfSSL_LIBRARIES ${WolfSSL_LIBRARY})
endif()
|
78
src/dependencies/curl-8.8.0/CMake/FindZstd.cmake
Normal file

@@ -0,0 +1,78 @@
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                              \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################

#[=======================================================================[.rst:
FindZstd
----------

Find the zstd library

Result Variables
^^^^^^^^^^^^^^^^

``Zstd_FOUND``
  System has zstd
``Zstd_INCLUDE_DIRS``
  The zstd include directories.
``Zstd_LIBRARIES``
  The libraries needed to use zstd
#]=======================================================================]

if(UNIX)
  find_package(PkgConfig QUIET)
  pkg_search_module(PC_Zstd libzstd)
endif()

find_path(Zstd_INCLUDE_DIR zstd.h
  HINTS
    ${PC_Zstd_INCLUDEDIR}
    ${PC_Zstd_INCLUDE_DIRS}
)

find_library(Zstd_LIBRARY NAMES zstd
  HINTS
    ${PC_Zstd_LIBDIR}
    ${PC_Zstd_LIBRARY_DIRS}
)

if(Zstd_INCLUDE_DIR)
  file(READ "${Zstd_INCLUDE_DIR}/zstd.h" _zstd_header)
  string(REGEX MATCH ".*define ZSTD_VERSION_MAJOR *([0-9]+).*define ZSTD_VERSION_MINOR *([0-9]+).*define ZSTD_VERSION_RELEASE *([0-9]+)" _zstd_ver "${_zstd_header}")
  set(Zstd_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Zstd
  REQUIRED_VARS
    Zstd_LIBRARY
    Zstd_INCLUDE_DIR
  VERSION_VAR Zstd_VERSION
)

if(Zstd_FOUND)
  set(Zstd_LIBRARIES ${Zstd_LIBRARY})
  set(Zstd_INCLUDE_DIRS ${Zstd_INCLUDE_DIR})
endif()

mark_as_advanced(Zstd_INCLUDE_DIRS Zstd_LIBRARIES)
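A short sketch of how the version parsed from zstd.h above might be used by a consumer, assuming this CMake directory is on CMAKE_MODULE_PATH. The target name my_app and the 1.0.0 version floor are assumptions for illustration only.

# Sketch: require zstd and guard on the version parsed from zstd.h above.
# "my_app" and the 1.0.0 floor are illustrative, not requirements from curl.
find_package(Zstd)
if(Zstd_FOUND AND NOT Zstd_VERSION VERSION_LESS 1.0.0)
  target_include_directories(my_app PRIVATE ${Zstd_INCLUDE_DIRS})
  target_link_libraries(my_app PRIVATE ${Zstd_LIBRARIES})
endif()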
|
80
src/dependencies/curl-8.8.0/CMake/Macros.cmake
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
#File defines convenience macros for available feature testing
|
||||||
|
|
||||||
|
# Check if header file exists and add it to the list.
|
||||||
|
# This macro is intended to be called multiple times with a sequence of
|
||||||
|
# possibly dependent header files. Some headers depend on others to be
|
||||||
|
# compiled correctly.
|
||||||
|
macro(check_include_file_concat FILE VARIABLE)
|
||||||
|
check_include_files("${CURL_INCLUDES};${FILE}" ${VARIABLE})
|
||||||
|
if(${VARIABLE})
|
||||||
|
set(CURL_INCLUDES ${CURL_INCLUDES} ${FILE})
|
||||||
|
set(CURL_TEST_DEFINES "${CURL_TEST_DEFINES} -D${VARIABLE}")
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
# For other curl specific tests, use this macro.
|
||||||
|
macro(curl_internal_test CURL_TEST)
|
||||||
|
if(NOT DEFINED "${CURL_TEST}")
|
||||||
|
set(MACRO_CHECK_FUNCTION_DEFINITIONS
|
||||||
|
"-D${CURL_TEST} ${CURL_TEST_DEFINES} ${CMAKE_REQUIRED_FLAGS}")
|
||||||
|
if(CMAKE_REQUIRED_LIBRARIES)
|
||||||
|
set(CURL_TEST_ADD_LIBRARIES
|
||||||
|
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
message(STATUS "Performing Test ${CURL_TEST}")
|
||||||
|
try_compile(${CURL_TEST}
|
||||||
|
${CMAKE_BINARY_DIR}
|
||||||
|
${CMAKE_CURRENT_SOURCE_DIR}/CMake/CurlTests.c
|
||||||
|
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_FUNCTION_DEFINITIONS}
|
||||||
|
"${CURL_TEST_ADD_LIBRARIES}"
|
||||||
|
OUTPUT_VARIABLE OUTPUT)
|
||||||
|
if(${CURL_TEST})
|
||||||
|
set(${CURL_TEST} 1 CACHE INTERNAL "Curl test ${FUNCTION}")
|
||||||
|
message(STATUS "Performing Test ${CURL_TEST} - Success")
|
||||||
|
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
|
||||||
|
"Performing Test ${CURL_TEST} passed with the following output:\n"
|
||||||
|
"${OUTPUT}\n")
|
||||||
|
else()
|
||||||
|
message(STATUS "Performing Test ${CURL_TEST} - Failed")
|
||||||
|
set(${CURL_TEST} "" CACHE INTERNAL "Curl test ${FUNCTION}")
|
||||||
|
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
|
||||||
|
"Performing Test ${CURL_TEST} failed with the following output:\n"
|
||||||
|
"${OUTPUT}\n")
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
macro(optional_dependency DEPENDENCY)
|
||||||
|
set(CURL_${DEPENDENCY} AUTO CACHE STRING "Build curl with ${DEPENDENCY} support (AUTO, ON or OFF)")
|
||||||
|
set_property(CACHE CURL_${DEPENDENCY} PROPERTY STRINGS AUTO ON OFF)
|
||||||
|
|
||||||
|
if(CURL_${DEPENDENCY} STREQUAL AUTO)
|
||||||
|
find_package(${DEPENDENCY})
|
||||||
|
elseif(CURL_${DEPENDENCY})
|
||||||
|
find_package(${DEPENDENCY} REQUIRED)
|
||||||
|
endif()
|
||||||
|
endmacro()
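The optional_dependency() macro defined above maps a tri-state cache option CURL_<DEPENDENCY> (AUTO, ON or OFF) onto find_package(). A minimal usage sketch follows; the Brotli dependency name is only an example, and the BROTLI_FOUND/BROTLI_LIBRARIES result variables are assumed from the corresponding find module rather than quoted from curl's build files.

# Sketch: tri-state dependency selection via optional_dependency().
# -DCURL_Brotli=AUTO probes quietly, ON makes it required, OFF skips it.
optional_dependency(Brotli)
if(BROTLI_FOUND)
  list(APPEND CURL_LIBS ${BROTLI_LIBRARIES})  # assumed result variables
endif()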
|
184
src/dependencies/curl-8.8.0/CMake/OtherTests.cmake
Normal file
|
@ -0,0 +1,184 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
include(CheckCSourceCompiles)
|
||||||
|
include(CheckCSourceRuns)
|
||||||
|
include(CheckTypeSize)
|
||||||
|
|
||||||
|
macro(add_header_include check header)
|
||||||
|
if(${check})
|
||||||
|
set(_source_epilogue "${_source_epilogue}
|
||||||
|
#include <${header}>")
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
set(CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
|
||||||
|
|
||||||
|
if(NOT DEFINED HAVE_STRUCT_SOCKADDR_STORAGE)
|
||||||
|
set(CMAKE_EXTRA_INCLUDE_FILES)
|
||||||
|
if(WIN32)
|
||||||
|
set(CMAKE_EXTRA_INCLUDE_FILES "winsock2.h")
|
||||||
|
set(CMAKE_REQUIRED_DEFINITIONS "-DWIN32_LEAN_AND_MEAN")
|
||||||
|
set(CMAKE_REQUIRED_LIBRARIES "ws2_32")
|
||||||
|
elseif(HAVE_SYS_SOCKET_H)
|
||||||
|
set(CMAKE_EXTRA_INCLUDE_FILES "sys/socket.h")
|
||||||
|
endif()
|
||||||
|
check_type_size("struct sockaddr_storage" SIZEOF_STRUCT_SOCKADDR_STORAGE)
|
||||||
|
set(HAVE_STRUCT_SOCKADDR_STORAGE ${HAVE_SIZEOF_STRUCT_SOCKADDR_STORAGE})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(NOT WIN32)
|
||||||
|
set(_source_epilogue "#undef inline")
|
||||||
|
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
|
||||||
|
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
|
||||||
|
check_c_source_compiles("${_source_epilogue}
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
int flag = MSG_NOSIGNAL;
|
||||||
|
(void)flag;
|
||||||
|
return 0;
|
||||||
|
}" HAVE_MSG_NOSIGNAL)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
set(_source_epilogue "#undef inline")
|
||||||
|
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
|
||||||
|
check_c_source_compiles("${_source_epilogue}
|
||||||
|
#include <time.h>
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
struct timeval ts;
|
||||||
|
ts.tv_sec = 0;
|
||||||
|
ts.tv_usec = 0;
|
||||||
|
(void)ts;
|
||||||
|
return 0;
|
||||||
|
}" HAVE_STRUCT_TIMEVAL)
|
||||||
|
|
||||||
|
unset(CMAKE_TRY_COMPILE_TARGET_TYPE)
|
||||||
|
|
||||||
|
if(NOT CMAKE_CROSSCOMPILING AND NOT APPLE)
|
||||||
|
set(_source_epilogue "#undef inline")
|
||||||
|
add_header_include(HAVE_SYS_POLL_H "sys/poll.h")
|
||||||
|
add_header_include(HAVE_POLL_H "poll.h")
|
||||||
|
check_c_source_runs("${_source_epilogue}
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <sys/time.h>
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
if(0 != poll(0, 0, 10)) {
|
||||||
|
return 1; /* fail */
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
/* detect the 10.12 poll() breakage */
|
||||||
|
struct timeval before, after;
|
||||||
|
int rc;
|
||||||
|
size_t us;
|
||||||
|
|
||||||
|
gettimeofday(&before, NULL);
|
||||||
|
rc = poll(NULL, 0, 500);
|
||||||
|
gettimeofday(&after, NULL);
|
||||||
|
|
||||||
|
us = (after.tv_sec - before.tv_sec) * 1000000 +
|
||||||
|
(after.tv_usec - before.tv_usec);
|
||||||
|
|
||||||
|
if(us < 400000) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}" HAVE_POLL_FINE)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Detect HAVE_GETADDRINFO_THREADSAFE
|
||||||
|
|
||||||
|
if(WIN32)
|
||||||
|
set(HAVE_GETADDRINFO_THREADSAFE ${HAVE_GETADDRINFO})
|
||||||
|
elseif(NOT HAVE_GETADDRINFO)
|
||||||
|
set(HAVE_GETADDRINFO_THREADSAFE FALSE)
|
||||||
|
elseif(APPLE OR
|
||||||
|
CMAKE_SYSTEM_NAME STREQUAL "AIX" OR
|
||||||
|
CMAKE_SYSTEM_NAME STREQUAL "FreeBSD" OR
|
||||||
|
CMAKE_SYSTEM_NAME STREQUAL "HP-UX" OR
|
||||||
|
CMAKE_SYSTEM_NAME STREQUAL "MidnightBSD" OR
|
||||||
|
CMAKE_SYSTEM_NAME STREQUAL "NetBSD" OR
|
||||||
|
CMAKE_SYSTEM_NAME STREQUAL "SunOS")
|
||||||
|
set(HAVE_GETADDRINFO_THREADSAFE TRUE)
|
||||||
|
elseif(CMAKE_SYSTEM_NAME MATCHES "BSD")
|
||||||
|
set(HAVE_GETADDRINFO_THREADSAFE FALSE)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(NOT DEFINED HAVE_GETADDRINFO_THREADSAFE)
|
||||||
|
set(_source_epilogue "#undef inline")
|
||||||
|
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
|
||||||
|
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
|
||||||
|
add_header_include(HAVE_NETDB_H "netdb.h")
|
||||||
|
check_c_source_compiles("${_source_epilogue}
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
#ifdef h_errno
|
||||||
|
return 0;
|
||||||
|
#else
|
||||||
|
force compilation error
|
||||||
|
#endif
|
||||||
|
}" HAVE_H_ERRNO)
|
||||||
|
|
||||||
|
if(NOT HAVE_H_ERRNO)
|
||||||
|
check_c_source_compiles("${_source_epilogue}
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
h_errno = 2;
|
||||||
|
return h_errno != 0 ? 1 : 0;
|
||||||
|
}" HAVE_H_ERRNO_ASSIGNABLE)
|
||||||
|
|
||||||
|
if(NOT HAVE_H_ERRNO_ASSIGNABLE)
|
||||||
|
check_c_source_compiles("${_source_epilogue}
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
#if defined(_POSIX_C_SOURCE) && (_POSIX_C_SOURCE >= 200809L)
|
||||||
|
return 0;
|
||||||
|
#elif defined(_XOPEN_SOURCE) && (_XOPEN_SOURCE >= 700)
|
||||||
|
return 0;
|
||||||
|
#else
|
||||||
|
force compilation error
|
||||||
|
#endif
|
||||||
|
}" HAVE_H_ERRNO_SBS_ISSUE_7)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(HAVE_H_ERRNO OR HAVE_H_ERRNO_ASSIGNABLE OR HAVE_H_ERRNO_SBS_ISSUE_7)
|
||||||
|
set(HAVE_GETADDRINFO_THREADSAFE TRUE)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(NOT WIN32 AND NOT DEFINED HAVE_CLOCK_GETTIME_MONOTONIC_RAW)
|
||||||
|
set(_source_epilogue "#undef inline")
|
||||||
|
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
|
||||||
|
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
|
||||||
|
check_c_source_compiles("${_source_epilogue}
|
||||||
|
#include <time.h>
|
||||||
|
int main(void)
|
||||||
|
{
|
||||||
|
struct timespec ts;
|
||||||
|
(void)clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
|
||||||
|
return 0;
|
||||||
|
}" HAVE_CLOCK_GETTIME_MONOTONIC_RAW)
|
||||||
|
endif()
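The add_header_include() macro near the top of OtherTests.cmake builds up a _source_epilogue of include directives that each compile probe is prefixed with. A sketch of adding one more probe in the same style; HAVE_EXAMPLE_HDR_H, example_hdr.h and HAVE_EXAMPLE_FEATURE are invented names for illustration, not checks curl performs.

# Sketch: probe a hypothetical feature using the same epilogue pattern.
set(_source_epilogue "#undef inline")
add_header_include(HAVE_EXAMPLE_HDR_H "example_hdr.h")
check_c_source_compiles("${_source_epilogue}
  int main(void)
  {
    return 0;
  }" HAVE_EXAMPLE_FEATURE)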
|
236
src/dependencies/curl-8.8.0/CMake/PickyWarnings.cmake
Normal file
|
@ -0,0 +1,236 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
include(CheckCCompilerFlag)
|
||||||
|
|
||||||
|
unset(WPICKY)
|
||||||
|
|
||||||
|
if(CURL_WERROR AND
|
||||||
|
((CMAKE_COMPILER_IS_GNUCC AND
|
||||||
|
NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0 AND
|
||||||
|
NOT CMAKE_VERSION VERSION_LESS 3.23.0) OR # check_symbol_exists() incompatible with GCC -pedantic-errors in earlier CMake versions
|
||||||
|
CMAKE_C_COMPILER_ID MATCHES "Clang"))
|
||||||
|
set(WPICKY "${WPICKY} -pedantic-errors")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(PICKY_COMPILER)
|
||||||
|
if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||||
|
|
||||||
|
# https://clang.llvm.org/docs/DiagnosticsReference.html
|
||||||
|
# https://gcc.gnu.org/onlinedocs/gcc/Warning-Options.html
|
||||||
|
|
||||||
|
# WPICKY_ENABLE = Options we want to enable as-is.
|
||||||
|
# WPICKY_DETECT = Options we want to test first and enable if available.
|
||||||
|
|
||||||
|
# Prefer the -Wextra alias with clang.
|
||||||
|
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||||
|
set(WPICKY_ENABLE "-Wextra")
|
||||||
|
else()
|
||||||
|
set(WPICKY_ENABLE "-W")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wall -pedantic
|
||||||
|
)
|
||||||
|
|
||||||
|
# ----------------------------------
|
||||||
|
# Add new options here, if in doubt:
|
||||||
|
# ----------------------------------
|
||||||
|
set(WPICKY_DETECT
|
||||||
|
)
|
||||||
|
|
||||||
|
# Assume these options always exist with both clang and gcc.
|
||||||
|
# Require clang 3.0 / gcc 2.95 or later.
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wbad-function-cast # clang 2.7 gcc 2.95
|
||||||
|
-Wconversion # clang 2.7 gcc 2.95
|
||||||
|
-Winline # clang 1.0 gcc 1.0
|
||||||
|
-Wmissing-declarations # clang 1.0 gcc 2.7
|
||||||
|
-Wmissing-prototypes # clang 1.0 gcc 1.0
|
||||||
|
-Wnested-externs # clang 1.0 gcc 2.7
|
||||||
|
-Wno-long-long # clang 1.0 gcc 2.95
|
||||||
|
-Wno-multichar # clang 1.0 gcc 2.95
|
||||||
|
-Wpointer-arith # clang 1.0 gcc 1.4
|
||||||
|
-Wshadow # clang 1.0 gcc 2.95
|
||||||
|
-Wsign-compare # clang 1.0 gcc 2.95
|
||||||
|
-Wundef # clang 1.0 gcc 2.95
|
||||||
|
-Wunused # clang 1.1 gcc 2.95
|
||||||
|
-Wwrite-strings # clang 1.0 gcc 1.4
|
||||||
|
)
|
||||||
|
|
||||||
|
# Always enable with clang, version dependent with gcc
|
||||||
|
set(WPICKY_COMMON_OLD
|
||||||
|
-Waddress # clang 2.7 gcc 4.3
|
||||||
|
-Wattributes # clang 2.7 gcc 4.1
|
||||||
|
-Wcast-align # clang 1.0 gcc 4.2
|
||||||
|
-Wdeclaration-after-statement # clang 1.0 gcc 3.4
|
||||||
|
-Wdiv-by-zero # clang 2.7 gcc 4.1
|
||||||
|
-Wempty-body # clang 2.7 gcc 4.3
|
||||||
|
-Wendif-labels # clang 1.0 gcc 3.3
|
||||||
|
-Wfloat-equal # clang 1.0 gcc 2.96 (3.0)
|
||||||
|
-Wformat-security # clang 2.7 gcc 4.1
|
||||||
|
-Wignored-qualifiers # clang 2.8 gcc 4.3
|
||||||
|
-Wmissing-field-initializers # clang 2.7 gcc 4.1
|
||||||
|
-Wmissing-noreturn # clang 2.7 gcc 4.1
|
||||||
|
-Wno-format-nonliteral # clang 1.0 gcc 2.96 (3.0)
|
||||||
|
-Wno-system-headers # clang 1.0 gcc 3.0
|
||||||
|
# -Wpadded # clang 2.9 gcc 4.1 # Not used because we cannot change public structs
|
||||||
|
-Wold-style-definition # clang 2.7 gcc 3.4
|
||||||
|
-Wredundant-decls # clang 2.7 gcc 4.1
|
||||||
|
-Wsign-conversion # clang 2.9 gcc 4.3
|
||||||
|
-Wno-error=sign-conversion # FIXME
|
||||||
|
-Wstrict-prototypes # clang 1.0 gcc 3.3
|
||||||
|
# -Wswitch-enum # clang 2.7 gcc 4.1 # Not used because this basically disallows default case
|
||||||
|
-Wtype-limits # clang 2.7 gcc 4.3
|
||||||
|
-Wunreachable-code # clang 2.7 gcc 4.1
|
||||||
|
# -Wunused-macros # clang 2.7 gcc 4.1 # Not practical
|
||||||
|
-Wunused-parameter # clang 2.7 gcc 4.1
|
||||||
|
-Wvla # clang 2.8 gcc 4.3
|
||||||
|
)
|
||||||
|
|
||||||
|
set(WPICKY_COMMON
|
||||||
|
-Wdouble-promotion # clang 3.6 gcc 4.6 appleclang 6.3
|
||||||
|
-Wenum-conversion # clang 3.2 gcc 10.0 appleclang 4.6 g++ 11.0
|
||||||
|
-Wpragmas # clang 3.5 gcc 4.1 appleclang 6.0
|
||||||
|
-Wunused-const-variable # clang 3.4 gcc 6.0 appleclang 5.1
|
||||||
|
)
|
||||||
|
|
||||||
|
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
${WPICKY_COMMON_OLD}
|
||||||
|
-Wshift-sign-overflow # clang 2.9
|
||||||
|
-Wshorten-64-to-32 # clang 1.0
|
||||||
|
-Wlanguage-extension-token # clang 3.0
|
||||||
|
-Wformat=2 # clang 3.0 gcc 4.8
|
||||||
|
)
|
||||||
|
# Enable based on compiler version
|
||||||
|
if((CMAKE_C_COMPILER_ID STREQUAL "Clang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 3.6) OR
|
||||||
|
(CMAKE_C_COMPILER_ID STREQUAL "AppleClang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 6.3))
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
${WPICKY_COMMON}
|
||||||
|
-Wunreachable-code-break # clang 3.5 appleclang 6.0
|
||||||
|
-Wheader-guard # clang 3.4 appleclang 5.1
|
||||||
|
-Wsometimes-uninitialized # clang 3.2 appleclang 4.6
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if((CMAKE_C_COMPILER_ID STREQUAL "Clang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 3.9) OR
|
||||||
|
(CMAKE_C_COMPILER_ID STREQUAL "AppleClang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 8.3))
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wcomma # clang 3.9 appleclang 8.3
|
||||||
|
-Wmissing-variable-declarations # clang 3.2 appleclang 4.6
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if((CMAKE_C_COMPILER_ID STREQUAL "Clang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 7.0) OR
|
||||||
|
(CMAKE_C_COMPILER_ID STREQUAL "AppleClang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 10.3))
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wassign-enum # clang 7.0 appleclang 10.3
|
||||||
|
-Wextra-semi-stmt # clang 7.0 appleclang 10.3
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if((CMAKE_C_COMPILER_ID STREQUAL "Clang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 10.0) OR
|
||||||
|
(CMAKE_C_COMPILER_ID STREQUAL "AppleClang" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 12.4))
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wimplicit-fallthrough # clang 4.0 gcc 7.0 appleclang 12.4 # we have silencing markup for clang 10.0 and above only
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
else() # gcc
|
||||||
|
list(APPEND WPICKY_DETECT
|
||||||
|
${WPICKY_COMMON}
|
||||||
|
)
|
||||||
|
# Enable based on compiler version
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 4.3)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
${WPICKY_COMMON_OLD}
|
||||||
|
-Wclobbered # gcc 4.3
|
||||||
|
-Wmissing-parameter-type # gcc 4.3
|
||||||
|
-Wold-style-declaration # gcc 4.3
|
||||||
|
-Wstrict-aliasing=3 # gcc 4.0
|
||||||
|
-Wtrampolines # gcc 4.3
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 4.5 AND MINGW)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wno-pedantic-ms-format # gcc 4.5 (mingw-only)
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 4.8)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wformat=2 # clang 3.0 gcc 4.8
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Warray-bounds=2 -ftree-vrp # clang 3.0 gcc 5.0 (clang default: -Warray-bounds)
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 6.0)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Wduplicated-cond # gcc 6.0
|
||||||
|
-Wnull-dereference # clang 3.0 gcc 6.0 (clang default)
|
||||||
|
-fdelete-null-pointer-checks
|
||||||
|
-Wshift-negative-value # clang 3.7 gcc 6.0 (clang default)
|
||||||
|
-Wshift-overflow=2 # clang 3.0 gcc 6.0 (clang default: -Wshift-overflow)
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 7.0)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Walloc-zero # gcc 7.0
|
||||||
|
-Wduplicated-branches # gcc 7.0
|
||||||
|
-Wformat-overflow=2 # gcc 7.0
|
||||||
|
-Wformat-truncation=2 # gcc 7.0
|
||||||
|
-Wimplicit-fallthrough # clang 4.0 gcc 7.0
|
||||||
|
-Wrestrict # gcc 7.0
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 10.0)
|
||||||
|
list(APPEND WPICKY_ENABLE
|
||||||
|
-Warith-conversion # gcc 10.0
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
#
|
||||||
|
|
||||||
|
foreach(_CCOPT IN LISTS WPICKY_ENABLE)
|
||||||
|
set(WPICKY "${WPICKY} ${_CCOPT}")
|
||||||
|
endforeach()
|
||||||
|
|
||||||
|
foreach(_CCOPT IN LISTS WPICKY_DETECT)
|
||||||
|
# surprisingly, CHECK_C_COMPILER_FLAG needs a new variable to store each new
|
||||||
|
# test result in.
|
||||||
|
string(MAKE_C_IDENTIFIER "OPT${_CCOPT}" _optvarname)
|
||||||
|
# GCC only warns about unknown -Wno- options if there are also other diagnostic messages,
|
||||||
|
# so test for the positive form instead
|
||||||
|
string(REPLACE "-Wno-" "-W" _CCOPT_ON "${_CCOPT}")
|
||||||
|
check_c_compiler_flag(${_CCOPT_ON} ${_optvarname})
|
||||||
|
if(${_optvarname})
|
||||||
|
set(WPICKY "${WPICKY} ${_CCOPT}")
|
||||||
|
endif()
|
||||||
|
endforeach()
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(WPICKY)
|
||||||
|
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${WPICKY}")
|
||||||
|
message(STATUS "Picky compiler options:${WPICKY}")
|
||||||
|
endif()
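The WPICKY_DETECT loop above tests each candidate flag with check_c_compiler_flag() before appending it. The same pattern in isolation, as a sketch for anyone extending the list; "-Wexample" is a placeholder flag name, not a real compiler option.

# Sketch: probe one warning flag before appending it, mirroring the loop above.
include(CheckCCompilerFlag)
check_c_compiler_flag("-Wexample" HAVE_W_EXAMPLE)
if(HAVE_W_EXAMPLE)
  set(WPICKY "${WPICKY} -Wexample")
endif()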
|
191
src/dependencies/curl-8.8.0/CMake/Platforms/WindowsCache.cmake
Normal file
|
@ -0,0 +1,191 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
if(NOT WIN32)
|
||||||
|
message(FATAL_ERROR "This file should be included on Windows platform only")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
set(HAVE_LOCALE_H 1)
|
||||||
|
|
||||||
|
if(MINGW)
|
||||||
|
set(HAVE_SNPRINTF 1)
|
||||||
|
set(HAVE_UNISTD_H 1)
|
||||||
|
set(HAVE_LIBGEN_H 1)
|
||||||
|
set(HAVE_STDDEF_H 1) # detected by CMake internally in check_type_size()
|
||||||
|
set(HAVE_STDBOOL_H 1)
|
||||||
|
set(HAVE_BOOL_T "${HAVE_STDBOOL_H}")
|
||||||
|
set(HAVE_STRTOLL 1)
|
||||||
|
set(HAVE_BASENAME 1)
|
||||||
|
set(HAVE_STRCASECMP 1)
|
||||||
|
set(HAVE_FTRUNCATE 1)
|
||||||
|
set(HAVE_SYS_PARAM_H 1)
|
||||||
|
set(HAVE_SYS_TIME_H 1)
|
||||||
|
set(HAVE_GETTIMEOFDAY 1)
|
||||||
|
else()
|
||||||
|
set(HAVE_LIBGEN_H 0)
|
||||||
|
set(HAVE_STRCASECMP 0)
|
||||||
|
set(HAVE_FTRUNCATE 0)
|
||||||
|
set(HAVE_SYS_PARAM_H 0)
|
||||||
|
set(HAVE_SYS_TIME_H 0)
|
||||||
|
set(HAVE_GETTIMEOFDAY 0)
|
||||||
|
if(MSVC)
|
||||||
|
set(HAVE_UNISTD_H 0)
|
||||||
|
set(HAVE_LOCALE_H 1)
|
||||||
|
set(HAVE_STDDEF_H 1) # detected by CMake internally in check_type_size()
|
||||||
|
set(HAVE_STDATOMIC_H 0)
|
||||||
|
if(NOT MSVC_VERSION LESS 1800)
|
||||||
|
set(HAVE_STDBOOL_H 1)
|
||||||
|
set(HAVE_STRTOLL 1)
|
||||||
|
else()
|
||||||
|
set(HAVE_STDBOOL_H 0)
|
||||||
|
set(HAVE_STRTOLL 0)
|
||||||
|
endif()
|
||||||
|
set(HAVE_BOOL_T "${HAVE_STDBOOL_H}")
|
||||||
|
if(NOT MSVC_VERSION LESS 1900)
|
||||||
|
set(HAVE_SNPRINTF 1)
|
||||||
|
else()
|
||||||
|
set(HAVE_SNPRINTF 0)
|
||||||
|
endif()
|
||||||
|
set(HAVE_BASENAME 0)
|
||||||
|
set(HAVE_STRTOK_R 0)
|
||||||
|
set(HAVE_FILE_OFFSET_BITS 0)
|
||||||
|
set(HAVE_ATOMIC 0)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Available in Windows XP and newer
|
||||||
|
set(HAVE_GETADDRINFO 1)
|
||||||
|
set(HAVE_FREEADDRINFO 1)
|
||||||
|
|
||||||
|
set(HAVE_FCHMOD 0)
|
||||||
|
set(HAVE_SOCKETPAIR 0)
|
||||||
|
set(HAVE_SENDMSG 0)
|
||||||
|
set(HAVE_ALARM 0)
|
||||||
|
set(HAVE_FCNTL 0)
|
||||||
|
set(HAVE_GETPPID 0)
|
||||||
|
set(HAVE_UTIMES 0)
|
||||||
|
set(HAVE_GETPWUID_R 0)
|
||||||
|
set(HAVE_STRERROR_R 0)
|
||||||
|
set(HAVE_SIGINTERRUPT 0)
|
||||||
|
set(HAVE_PIPE 0)
|
||||||
|
set(HAVE_IF_NAMETOINDEX 0)
|
||||||
|
set(HAVE_GETRLIMIT 0)
|
||||||
|
set(HAVE_SETRLIMIT 0)
|
||||||
|
set(HAVE_FSETXATTR 0)
|
||||||
|
set(HAVE_LIBSOCKET 0)
|
||||||
|
set(HAVE_SETLOCALE 1)
|
||||||
|
set(HAVE_SETMODE 1)
|
||||||
|
set(HAVE_GETPEERNAME 1)
|
||||||
|
set(HAVE_GETSOCKNAME 1)
|
||||||
|
set(HAVE_GETHOSTNAME 1)
|
||||||
|
set(HAVE_LIBZ 0)
|
||||||
|
|
||||||
|
set(HAVE_RECV 1)
|
||||||
|
set(HAVE_SEND 1)
|
||||||
|
set(HAVE_STROPTS_H 0)
|
||||||
|
set(HAVE_SYS_XATTR_H 0)
|
||||||
|
set(HAVE_ARC4RANDOM 0)
|
||||||
|
set(HAVE_FNMATCH 0)
|
||||||
|
set(HAVE_SCHED_YIELD 0)
|
||||||
|
set(HAVE_ARPA_INET_H 0)
|
||||||
|
set(HAVE_FCNTL_H 1)
|
||||||
|
set(HAVE_IFADDRS_H 0)
|
||||||
|
set(HAVE_IO_H 1)
|
||||||
|
set(HAVE_NETDB_H 0)
|
||||||
|
set(HAVE_NETINET_IN_H 0)
|
||||||
|
set(HAVE_NETINET_TCP_H 0)
|
||||||
|
set(HAVE_NETINET_UDP_H 0)
|
||||||
|
set(HAVE_NET_IF_H 0)
|
||||||
|
set(HAVE_IOCTL_SIOCGIFADDR 0)
|
||||||
|
set(HAVE_POLL_H 0)
|
||||||
|
set(HAVE_POLL_FINE 0)
|
||||||
|
set(HAVE_PWD_H 0)
|
||||||
|
set(HAVE_STRINGS_H 0) # mingw-w64 has it (wrapper to string.h)
|
||||||
|
set(HAVE_SYS_FILIO_H 0)
|
||||||
|
set(HAVE_SYS_WAIT_H 0)
|
||||||
|
set(HAVE_SYS_IOCTL_H 0)
|
||||||
|
set(HAVE_SYS_POLL_H 0)
|
||||||
|
set(HAVE_SYS_RESOURCE_H 0)
|
||||||
|
set(HAVE_SYS_SELECT_H 0)
|
||||||
|
set(HAVE_SYS_SOCKET_H 0)
|
||||||
|
set(HAVE_SYS_SOCKIO_H 0)
|
||||||
|
set(HAVE_SYS_STAT_H 1)
|
||||||
|
set(HAVE_SYS_TYPES_H 1)
|
||||||
|
set(HAVE_SYS_UN_H 0)
|
||||||
|
set(HAVE_SYS_UTIME_H 1)
|
||||||
|
set(HAVE_TERMIOS_H 0)
|
||||||
|
set(HAVE_TERMIO_H 0)
|
||||||
|
set(HAVE_UTIME_H 0) # mingw-w64 has it (wrapper to sys/utime.h)
|
||||||
|
|
||||||
|
set(HAVE_DIRENT_H 0)
|
||||||
|
set(HAVE_OPENDIR 0)
|
||||||
|
|
||||||
|
set(HAVE_FSEEKO 0)
|
||||||
|
set(HAVE__FSEEKI64 1)
|
||||||
|
set(HAVE_SOCKET 1)
|
||||||
|
set(HAVE_SELECT 1)
|
||||||
|
set(HAVE_STRDUP 1)
|
||||||
|
set(HAVE_STRICMP 1)
|
||||||
|
set(HAVE_STRCMPI 1)
|
||||||
|
set(HAVE_MEMRCHR 0)
|
||||||
|
set(HAVE_CLOSESOCKET 1)
|
||||||
|
set(HAVE_SIGSETJMP 0)
|
||||||
|
set(HAVE_SOCKADDR_IN6_SIN6_SCOPE_ID 1)
|
||||||
|
set(HAVE_GETPASS_R 0)
|
||||||
|
set(HAVE_GETPWUID 0)
|
||||||
|
set(HAVE_GETEUID 0)
|
||||||
|
set(HAVE_UTIME 1)
|
||||||
|
set(HAVE_GMTIME_R 0)
|
||||||
|
set(HAVE_GETHOSTBYNAME_R 0)
|
||||||
|
set(HAVE_SIGNAL 1)
|
||||||
|
set(HAVE_SIGACTION 0)
|
||||||
|
set(HAVE_LINUX_TCP_H 0)
|
||||||
|
set(HAVE_GLIBC_STRERROR_R 0)
|
||||||
|
set(HAVE_MACH_ABSOLUTE_TIME 0)
|
||||||
|
set(HAVE_GETIFADDRS 0)
|
||||||
|
set(HAVE_FCNTL_O_NONBLOCK 0)
|
||||||
|
set(HAVE_IOCTLSOCKET 1)
|
||||||
|
set(HAVE_IOCTLSOCKET_CAMEL 0)
|
||||||
|
set(HAVE_IOCTLSOCKET_CAMEL_FIONBIO 0)
|
||||||
|
set(HAVE_IOCTLSOCKET_FIONBIO 1)
|
||||||
|
set(HAVE_IOCTL_FIONBIO 0)
|
||||||
|
set(HAVE_SETSOCKOPT_SO_NONBLOCK 0)
|
||||||
|
set(HAVE_POSIX_STRERROR_R 0)
|
||||||
|
set(HAVE_BUILTIN_AVAILABLE 0)
|
||||||
|
set(HAVE_MSG_NOSIGNAL 0)
|
||||||
|
set(HAVE_STRUCT_TIMEVAL 1)
|
||||||
|
set(HAVE_STRUCT_SOCKADDR_STORAGE 1)
|
||||||
|
|
||||||
|
set(HAVE_GETHOSTBYNAME_R_3 0)
|
||||||
|
set(HAVE_GETHOSTBYNAME_R_3_REENTRANT 0)
|
||||||
|
set(HAVE_GETHOSTBYNAME_R_5 0)
|
||||||
|
set(HAVE_GETHOSTBYNAME_R_5_REENTRANT 0)
|
||||||
|
set(HAVE_GETHOSTBYNAME_R_6 0)
|
||||||
|
set(HAVE_GETHOSTBYNAME_R_6_REENTRANT 0)
|
||||||
|
|
||||||
|
set(HAVE_O_NONBLOCK 0)
|
||||||
|
set(HAVE_IN_ADDR_T 0)
|
||||||
|
set(STDC_HEADERS 1)
|
||||||
|
|
||||||
|
set(HAVE_SIZEOF_SUSECONDS_T 0)
|
||||||
|
set(HAVE_SIZEOF_SA_FAMILY_T 0)
|
35
src/dependencies/curl-8.8.0/CMake/Utilities.cmake
Normal file

@@ -0,0 +1,35 @@
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                              \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
# File containing various utilities

# Returns number of arguments that evaluate to true
function(count_true output_count_var)
  set(lst_len 0)
  foreach(option_var IN LISTS ARGN)
    if(${option_var})
      math(EXPR lst_len "${lst_len} + 1")
    endif()
  endforeach()
  set(${output_count_var} ${lst_len} PARENT_SCOPE)
endfunction()
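count_true() takes variable names, dereferences each one, and reports how many evaluate to true, which is handy for validating mutually exclusive options. A small usage sketch with made-up variable names:

# Sketch: count how many of three hypothetical toggles are enabled.
set(OPT_A ON)
set(OPT_B OFF)
set(OPT_C ON)
count_true(enabled_count OPT_A OPT_B OPT_C)
message(STATUS "Enabled options: ${enabled_count}")  # prints 2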
|
49
src/dependencies/curl-8.8.0/CMake/cmake_uninstall.cmake.in
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
|
||||||
|
message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(NOT DEFINED CMAKE_INSTALL_PREFIX)
|
||||||
|
set(CMAKE_INSTALL_PREFIX "@CMAKE_INSTALL_PREFIX@")
|
||||||
|
endif()
|
||||||
|
message(${CMAKE_INSTALL_PREFIX})
|
||||||
|
|
||||||
|
file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
|
||||||
|
string(REGEX REPLACE "\n" ";" files "${files}")
|
||||||
|
foreach(file ${files})
|
||||||
|
message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
|
||||||
|
if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
|
||||||
|
exec_program(
|
||||||
|
"@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
|
||||||
|
OUTPUT_VARIABLE rm_out
|
||||||
|
RETURN_VALUE rm_retval
|
||||||
|
)
|
||||||
|
if(NOT "${rm_retval}" STREQUAL 0)
|
||||||
|
message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
|
||||||
|
endif()
|
||||||
|
else()
|
||||||
|
message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
|
||||||
|
endif()
|
||||||
|
endforeach()
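cmake_uninstall.cmake.in above reads install_manifest.txt and removes each installed file, but it only runs if the top-level build wires it in. The usual pattern, and roughly what a consuming CMakeLists.txt would do, is to configure the template and expose it as an uninstall target; this is a sketch under those assumptions, with typical paths rather than lines copied from curl's own CMakeLists.txt.

# Sketch: generate the uninstall script from the template above and expose
# it as a custom target. Paths are typical, not quoted from curl verbatim.
configure_file(
  "${CMAKE_CURRENT_SOURCE_DIR}/CMake/cmake_uninstall.cmake.in"
  "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
  @ONLY)
add_custom_target(uninstall
  COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake")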
|
40
src/dependencies/curl-8.8.0/CMake/curl-config.cmake.in
Normal file

@@ -0,0 +1,40 @@
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | | _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                              \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
@PACKAGE_INIT@

include(CMakeFindDependencyMacro)
if(@USE_OPENSSL@)
  find_dependency(OpenSSL @OPENSSL_VERSION_MAJOR@)
endif()
if(@USE_ZLIB@)
  find_dependency(ZLIB @ZLIB_VERSION_MAJOR@)
endif()

include("${CMAKE_CURRENT_LIST_DIR}/@TARGETS_EXPORT_NAME@.cmake")
check_required_components("@PROJECT_NAME@")

# Alias for either shared or static library
if(NOT TARGET @PROJECT_NAME@::libcurl)
  add_library(@PROJECT_NAME@::libcurl ALIAS @PROJECT_NAME@::@LIB_SELECTED@)
endif()
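This template becomes the installed package configuration file: it pulls in the OpenSSL/ZLIB dependencies when they were enabled, includes the exported targets, and aliases whichever library was built to ::libcurl. A sketch of a downstream consumer, assuming the package is installed under the CURL project name; the executable name my_app is hypothetical.

# Sketch: consume the installed package from another project.
find_package(CURL CONFIG REQUIRED)
add_executable(my_app main.c)
target_link_libraries(my_app PRIVATE CURL::libcurl)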
|
1889
src/dependencies/curl-8.8.0/CMakeLists.txt
Normal file
File diff suppressed because it is too large
22
src/dependencies/curl-8.8.0/COPYING
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
COPYRIGHT AND PERMISSION NOTICE
|
||||||
|
|
||||||
|
Copyright (c) 1996 - 2024, Daniel Stenberg, <daniel@haxx.se>, and many
|
||||||
|
contributors, see the THANKS file.
|
||||||
|
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software for any purpose
|
||||||
|
with or without fee is hereby granted, provided that the above copyright
|
||||||
|
notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN
|
||||||
|
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||||
|
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||||
|
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||||
|
OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
Except as contained in this notice, the name of a copyright holder shall not
|
||||||
|
be used in advertising or otherwise to promote the sale, use or other dealings
|
||||||
|
in this Software without prior written authorization of the copyright holder.
|
41
src/dependencies/curl-8.8.0/Dockerfile
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
|
||||||
|
# Self-contained build environment to match the release environment.
|
||||||
|
#
|
||||||
|
# Build and set the timestamp for the date corresponding to the release
|
||||||
|
#
|
||||||
|
# docker build --build-arg SOURCE_DATE_EPOCH=1711526400 --build-arg UID=$(id -u) --build-arg GID=$(id -g) -t curl/curl .
|
||||||
|
#
|
||||||
|
# Then run commands from within the build environment, for example
|
||||||
|
#
|
||||||
|
# docker run --rm -it -u $(id -u):$(id -g) -v $(pwd):/usr/src -w /usr/src curl/curl autoreconf -fi
|
||||||
|
# docker run --rm -it -u $(id -u):$(id -g) -v $(pwd):/usr/src -w /usr/src curl/curl ./configure --without-ssl --without-libpsl
|
||||||
|
# docker run --rm -it -u $(id -u):$(id -g) -v $(pwd):/usr/src -w /usr/src curl/curl make
|
||||||
|
# docker run --rm -it -u $(id -u):$(id -g) -v $(pwd):/usr/src -w /usr/src curl/curl ./maketgz 8.7.1
|
||||||
|
#
|
||||||
|
# or get into a shell in the build environment, for example
|
||||||
|
#
|
||||||
|
# docker run --rm -it -u $(id -u):$(id -g) -v (pwd):/usr/src -w /usr/src curl/curl bash
|
||||||
|
# $ autoreconf -fi
|
||||||
|
# $ ./configure --without-ssl --without-libpsl
|
||||||
|
# $ make
|
||||||
|
# $ ./maketgz 8.7.1
|
||||||
|
|
||||||
|
# To update, get the latest digest e.g. from https://hub.docker.com/_/debian/tags
|
||||||
|
FROM debian:bookworm-slim@sha256:911821c26cc366231183098f489068afff2d55cf56911cb5b7bd32796538dfe1
|
||||||
|
|
||||||
|
RUN apt-get update -qq && apt-get install -qq -y --no-install-recommends \
|
||||||
|
build-essential make autoconf automake libtool git perl zip zlib1g-dev gawk && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
ARG UID=1000 GID=1000
|
||||||
|
|
||||||
|
RUN groupadd --gid $UID dev && \
|
||||||
|
useradd --uid $UID --gid dev --shell /bin/bash --create-home dev
|
||||||
|
|
||||||
|
USER dev:dev
|
||||||
|
|
||||||
|
ARG SOURCE_DATE_EPOCH
|
||||||
|
ENV SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH:-1}
|
71
src/dependencies/curl-8.8.0/Makefile
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
|
||||||
|
all:
|
||||||
|
./configure
|
||||||
|
make
|
||||||
|
|
||||||
|
ssl:
|
||||||
|
./configure --with-openssl
|
||||||
|
make
|
||||||
|
|
||||||
|
vc:
|
||||||
|
cd winbuild
|
||||||
|
nmake /f Makefile.vc MACHINE=x86
|
||||||
|
|
||||||
|
vc-x64:
|
||||||
|
cd winbuild
|
||||||
|
nmake /f Makefile.vc MACHINE=x64
|
||||||
|
|
||||||
|
djgpp%:
|
||||||
|
$(MAKE) -C lib -f Makefile.mk CFG=$@ CROSSPREFIX=i586-pc-msdosdjgpp-
|
||||||
|
$(MAKE) -C src -f Makefile.mk CFG=$@ CROSSPREFIX=i586-pc-msdosdjgpp-
|
||||||
|
|
||||||
|
cygwin:
|
||||||
|
./configure
|
||||||
|
make
|
||||||
|
|
||||||
|
cygwin-ssl:
|
||||||
|
./configure --with-openssl
|
||||||
|
make
|
||||||
|
|
||||||
|
amiga%:
|
||||||
|
$(MAKE) -C lib -f Makefile.mk CFG=$@ CROSSPREFIX=m68k-amigaos-
|
||||||
|
$(MAKE) -C src -f Makefile.mk CFG=$@ CROSSPREFIX=m68k-amigaos-
|
||||||
|
|
||||||
|
unix: all
|
||||||
|
|
||||||
|
unix-ssl: ssl
|
||||||
|
|
||||||
|
linux: all
|
||||||
|
|
||||||
|
linux-ssl: ssl
|
||||||
|
|
||||||
|
ca-bundle: scripts/mk-ca-bundle.pl
|
||||||
|
@echo "generate a fresh ca-bundle.crt"
|
||||||
|
@perl $< -b -l -u lib/ca-bundle.crt
|
||||||
|
|
||||||
|
ca-firefox: lib/firefox-db2pem.sh
|
||||||
|
@echo "generate a fresh ca-bundle.crt"
|
||||||
|
./lib/firefox-db2pem.sh lib/ca-bundle.crt
|
231
src/dependencies/curl-8.8.0/Makefile.am
Normal file
|
@ -0,0 +1,231 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
|
||||||
|
AUTOMAKE_OPTIONS = foreign
|
||||||
|
|
||||||
|
ACLOCAL_AMFLAGS = -I m4
|
||||||
|
|
||||||
|
CMAKE_DIST = \
|
||||||
|
CMake/cmake_uninstall.cmake.in \
|
||||||
|
CMake/CMakeConfigurableFile.in \
|
||||||
|
CMake/curl-config.cmake.in \
|
||||||
|
CMake/CurlSymbolHiding.cmake \
|
||||||
|
CMake/CurlTests.c \
|
||||||
|
CMake/FindBearSSL.cmake \
|
||||||
|
CMake/FindBrotli.cmake \
|
||||||
|
CMake/FindCARES.cmake \
|
||||||
|
CMake/FindGSS.cmake \
|
||||||
|
CMake/FindLibPSL.cmake \
|
||||||
|
CMake/FindLibSSH2.cmake \
|
||||||
|
CMake/FindMbedTLS.cmake \
|
||||||
|
CMake/FindMSH3.cmake \
|
||||||
|
CMake/FindNGHTTP2.cmake \
|
||||||
|
CMake/FindNGHTTP3.cmake \
|
||||||
|
CMake/FindNGTCP2.cmake \
|
||||||
|
CMake/FindQUICHE.cmake \
|
||||||
|
CMake/FindWolfSSL.cmake \
|
||||||
|
CMake/FindZstd.cmake \
|
||||||
|
CMake/Macros.cmake \
|
||||||
|
CMake/OtherTests.cmake \
|
||||||
|
CMake/PickyWarnings.cmake \
|
||||||
|
CMake/Platforms/WindowsCache.cmake \
|
||||||
|
CMake/Utilities.cmake \
|
||||||
|
CMakeLists.txt
|
||||||
|
|
||||||
|
VC_DIST = projects/README.md \
|
||||||
|
projects/build-openssl.bat \
|
||||||
|
projects/build-wolfssl.bat \
|
||||||
|
projects/checksrc.bat \
|
||||||
|
projects/generate.bat \
|
||||||
|
projects/wolfssl_options.h \
|
||||||
|
projects/wolfssl_override.props
|
||||||
|
|
||||||
|
WINBUILD_DIST = winbuild/README.md winbuild/gen_resp_file.bat \
|
||||||
|
winbuild/MakefileBuild.vc winbuild/Makefile.vc winbuild/makedebug.cmd
|
||||||
|
|
||||||
|
PLAN9_DIST = plan9/include/mkfile \
|
||||||
|
plan9/include/mkfile \
|
||||||
|
plan9/mkfile.proto \
|
||||||
|
plan9/mkfile \
|
||||||
|
plan9/README \
|
||||||
|
plan9/lib/mkfile.inc \
|
||||||
|
plan9/lib/mkfile \
|
||||||
|
plan9/src/mkfile.inc \
|
||||||
|
plan9/src/mkfile
|
||||||
|
|
||||||
|
EXTRA_DIST = CHANGES COPYING maketgz Makefile.dist curl-config.in \
|
||||||
|
RELEASE-NOTES buildconf libcurl.pc.in $(CMAKE_DIST) $(VC_DIST) \
|
||||||
|
$(WINBUILD_DIST) $(PLAN9_DIST) lib/libcurl.vers.in buildconf.bat \
|
||||||
|
libcurl.def Dockerfile
|
||||||
|
|
||||||
|
CLEANFILES = $(VC14_LIBVCXPROJ) $(VC14_SRCVCXPROJ) \
|
||||||
|
$(VC14_10_LIBVCXPROJ) $(VC14_10_SRCVCXPROJ) \
|
||||||
|
$(VC14_20_LIBVCXPROJ) $(VC14_20_SRCVCXPROJ) \
|
||||||
|
$(VC14_30_LIBVCXPROJ) $(VC14_30_SRCVCXPROJ)
|
||||||
|
|
||||||
|
bin_SCRIPTS = curl-config
|
||||||
|
|
||||||
|
SUBDIRS = lib docs src scripts
|
||||||
|
DIST_SUBDIRS = $(SUBDIRS) tests packages scripts include docs
|
||||||
|
|
||||||
|
pkgconfigdir = $(libdir)/pkgconfig
|
||||||
|
pkgconfig_DATA = libcurl.pc
|
||||||
|
|
||||||
|
# List of files required to generate VC IDE .dsp, .vcproj and .vcxproj files
|
||||||
|
include lib/Makefile.inc
|
||||||
|
include src/Makefile.inc
|
||||||
|
|
||||||
|
dist-hook:
|
||||||
|
rm -rf $(top_builddir)/tests/log
|
||||||
|
find $(distdir) -name "*.dist" -exec rm {} \;
|
||||||
|
(distit=`find $(srcdir) -name "*.dist" | grep -v ./ares/`; \
|
||||||
|
for file in $$distit; do \
|
||||||
|
strip=`echo $$file | sed -e s/^$(srcdir)// -e s/\.dist//`; \
|
||||||
|
cp -p $$file $(distdir)$$strip; \
|
||||||
|
done)
|
||||||
|
|
||||||
|
check: test examples check-docs
|
||||||
|
|
||||||
|
if CROSSCOMPILING
|
||||||
|
test-full: test
|
||||||
|
test-torture: test
|
||||||
|
|
||||||
|
test:
|
||||||
|
@echo "NOTICE: we can't run the tests when cross-compiling!"
|
||||||
|
|
||||||
|
else
|
||||||
|
|
||||||
|
test:
|
||||||
|
@(cd tests; $(MAKE) all quiet-test)
|
||||||
|
|
||||||
|
test-full:
|
||||||
|
@(cd tests; $(MAKE) all full-test)
|
||||||
|
|
||||||
|
test-nonflaky:
|
||||||
|
@(cd tests; $(MAKE) all nonflaky-test)
|
||||||
|
|
||||||
|
test-torture:
|
||||||
|
@(cd tests; $(MAKE) all torture-test)
|
||||||
|
|
||||||
|
test-event:
|
||||||
|
@(cd tests; $(MAKE) all event-test)
|
||||||
|
|
||||||
|
test-am:
|
||||||
|
@(cd tests; $(MAKE) all am-test)
|
||||||
|
|
||||||
|
test-ci:
	@(cd tests; $(MAKE) all ci-test)

endif

examples:
	@(cd docs/examples; $(MAKE) check)

check-docs:
	@(cd docs/libcurl; $(MAKE) check)

# Build source and binary rpms. For rpm-3.0 and above, the ~/.rpmmacros
# must contain the following line:
# %_topdir /home/loic/local/rpm
# and that /home/loic/local/rpm contains the directory SOURCES, BUILD etc.
#
# cd /home/loic/local/rpm ; mkdir -p SOURCES BUILD RPMS/i386 SPECS SRPMS
#
# If additional configure flags are needed to build the package, add the
# following in ~/.rpmmacros
# %configure CFLAGS="%{optflags}" ./configure %{_target_platform} --prefix=%{_prefix} ${AM_CONFIGFLAGS}
# and run make rpm in the following way:
# AM_CONFIGFLAGS='--with-uri=/home/users/loic/local/RedHat-6.2' make rpm
#

rpms:
	$(MAKE) RPMDIST=curl rpm
	$(MAKE) RPMDIST=curl-ssl rpm

rpm:
	RPM_TOPDIR=`rpm --showrc | $(PERL) -n -e 'print if(s/.*_topdir\s+(.*)/$$1/)'` ; \
	cp $(srcdir)/packages/Linux/RPM/$(RPMDIST).spec $$RPM_TOPDIR/SPECS ; \
	cp $(PACKAGE)-$(VERSION).tar.gz $$RPM_TOPDIR/SOURCES ; \
	rpm -ba --clean --rmsource $$RPM_TOPDIR/SPECS/$(RPMDIST).spec ; \
	mv $$RPM_TOPDIR/RPMS/i386/$(RPMDIST)-*.rpm . ; \
	mv $$RPM_TOPDIR/SRPMS/$(RPMDIST)-*.src.rpm .

#
# Build a Solaris pkgadd format file
# run 'make pkgadd' once you've done './configure' and 'make' to make a Solaris pkgadd format
# file (which ends up back in this directory).
# The pkgadd file is in 'pkgtrans' format, so to install on Solaris, do
# pkgadd -d ./HAXXcurl-*
#

# gak - libtool requires an absolute directory, hence the pwd below...
pkgadd:
	umask 022 ; \
	$(MAKE) install DESTDIR=`/bin/pwd`/packages/Solaris/root ; \
	cat COPYING > $(srcdir)/packages/Solaris/copyright ; \
	cd $(srcdir)/packages/Solaris && $(MAKE) package

#
# Build a cygwin binary tarball installation file
# resulting .tar.bz2 file will end up at packages/Win32/cygwin
cygwinbin:
	$(MAKE) -C packages/Win32/cygwin cygwinbin

# We extend the standard install with a custom hook:
if BUILD_DOCS
install-data-hook:
	(cd include && $(MAKE) install)
	(cd docs && $(MAKE) install)
	(cd docs/libcurl && $(MAKE) install)
else
install-data-hook:
	(cd include && $(MAKE) install)
	(cd docs && $(MAKE) install)
endif

# We extend the standard uninstall with a custom hook:
uninstall-hook:
	(cd include && $(MAKE) uninstall)
	(cd docs && $(MAKE) uninstall)
	(cd docs/libcurl && $(MAKE) uninstall)

ca-bundle: $(srcdir)/scripts/mk-ca-bundle.pl
	@echo "generating a fresh ca-bundle.crt"
	@perl $(srcdir)/scripts/mk-ca-bundle.pl -b -l -u lib/ca-bundle.crt

ca-firefox: $(srcdir)/scripts/firefox-db2pem.sh
	@echo "generating a fresh ca-bundle.crt"
	$(srcdir)/scripts/firefox-db2pem.sh lib/ca-bundle.crt

checksrc:
	(cd lib && $(MAKE) checksrc)
	(cd src && $(MAKE) checksrc)
	(cd tests && $(MAKE) checksrc)
	(cd include/curl && $(MAKE) checksrc)
	(cd docs/examples && $(MAKE) checksrc)
	(cd packages && $(MAKE) checksrc)

tidy:
	(cd src && $(MAKE) tidy)
	(cd lib && $(MAKE) tidy)
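
These maintainer targets are meant to be run from the top of a configured build tree; a minimal usage sketch, assuming ./configure has already completed and perl is on PATH:

    # regenerate lib/ca-bundle.crt from the current Mozilla CA data
    make ca-bundle

    # run the curl source style checker over the main source directories
    make checksrc
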
1788
src/dependencies/curl-8.8.0/Makefile.in
Normal file
File diff suppressed because it is too large
Load diff
55
src/dependencies/curl-8.8.0/README
Normal file
@@ -0,0 +1,55 @@
                                  _   _ ____  _
                              ___| | | |  _ \| |
                             / __| | | | |_) | |
                            | (__| |_| |  _ <| |___
                             \___|\___/|_| \_\_____|

README

  Curl is a command line tool for transferring data specified with URL
  syntax. Find out how to use curl by reading the curl.1 man page or the
  MANUAL document. Find out how to install Curl by reading the INSTALL
  document.

  libcurl is the library curl is using to do its job. It is readily
  available to be used by your software. Read the libcurl.3 man page to
  learn how.

  You find answers to the most frequent questions we get in the FAQ document.

  Study the COPYING file for distribution terms.

  Those documents and more can be found in the docs/ directory.

CONTACT

  If you have problems, questions, ideas or suggestions, please contact us
  by posting to a suitable mailing list. See https://curl.se/mail/

  All contributors to the project are listed in the THANKS document.

WEBSITE

  Visit the curl website for the latest news and downloads:

        https://curl.se/

GIT

  To download the latest source code off the GIT server, do this:

    git clone https://github.com/curl/curl.git

  (you will get a directory named curl created, filled with the source code)

SECURITY PROBLEMS

  Report suspected security problems via our HackerOne page and not in public.

    https://hackerone.com/curl

NOTICE

  Curl contains pieces of source code that is Copyright (c) 1998, 1999
  Kungliga Tekniska Högskolan. This notice is included here to comply with the
  distribution terms.
505
src/dependencies/curl-8.8.0/RELEASE-NOTES
Normal file
@@ -0,0 +1,505 @@
curl and libcurl 8.8.0

Public curl releases: 257
Command line options: 259
curl_easy_setopt() options: 305
Public functions in libcurl: 94
Contributors: 3173

This release includes the following changes:

o curl_version_info: provide librtmp version [73]
o file: add support for directory listings [63]
o idn: add native AppleIDN (icucore) support for macOS/iOS [95]
o lib: add curl_multi_waitfds [34]
o mbedTLS: implement CURLOPT_SSL_CIPHER_LIST option [103]
o NTLM_WB: drop support [67]
o TLS: add support for ECH (Encrypted Client Hello) [109]
o urlapi: add CURLU_GET_EMPTY for empty queries and fragments [111]

This release includes the following bugfixes:
|
||||||
|
|
||||||
|
o appveyor: drop unnecessary `--clean-first` cmake option [197]
|
||||||
|
o appveyor: guard against crash-build with VS2008 [193]
|
||||||
|
o appveyor: make gcc 6 mingw64 job build-only [152]
|
||||||
|
o asyn-thread: fix curl_global_cleanup crash in Windows [161]
|
||||||
|
o asyn-thread: fix Curl_thread_create result check [162]
|
||||||
|
o autotools: delete unused functions [177]
|
||||||
|
o autotools: fix `HAVE_IOCTLSOCKET_FIONBIO` test for gcc 14 [186]
|
||||||
|
o autotools: only probe for SGI MIPS compilers on IRIX [213]
|
||||||
|
o bearssl: fix compiler warnings [43]
|
||||||
|
o bearssl: use common code for cipher suite lookup [126]
|
||||||
|
o bufq: remove duplicate word in comment [154]
|
||||||
|
o BUG-BOUNTY.md: clarify the third party situation [210]
|
||||||
|
o build: prefer `USE_IPV6` macro internally (was: `ENABLE_IPV6`) [85]
|
||||||
|
o build: remove MacOSX-Framework script [60]
|
||||||
|
o cd2nroff/manage: use UTC when SOURCE_DATE_EPOCH is set [36]
|
||||||
|
o cf-https-connect: use timeouts as unsigned ints [143]
|
||||||
|
o cf-socket: don't try getting local IP without socket [188]
|
||||||
|
o cf-socket: remove references to l_ip, l_port [9]
|
||||||
|
o ci: add curl-for-win builds: Linux MUSL, macOS, Windows [68]
|
||||||
|
o cmake: add `BUILD_EXAMPLES` option to build examples [128]
|
||||||
|
o cmake: add librtmp/rtmpdump option and detection [108]
|
||||||
|
o cmake: check fseeko after detecting HAVE_FILE_OFFSET_BITS [64]
|
||||||
|
o cmake: do not pass linker flags to the static library tool [203]
|
||||||
|
o cmake: enable `-pedantic-errors` for clang when `CURL_WERROR=ON` [47]
|
||||||
|
o cmake: FindNGHTTP2 add static lib name to find_library call [141]
|
||||||
|
o cmake: fix `CURL_WERROR=ON` for old CMake and use it in GHA/linux-old [48]
|
||||||
|
o cmake: fix `HAVE_IOCTLSOCKET_FIONBIO` test with gcc 14 [179]
|
||||||
|
o cmake: fixup `DEPENDS` filename [51]
|
||||||
|
o cmake: forward `USE_LIBRTMP` option to C [59]
|
||||||
|
o cmake: generate misc manpages and install `mk-ca-bundle.pl` [24]
|
||||||
|
o cmake: initialize `BUILD_TESTING` before first use [227]
|
||||||
|
o cmake: speed up libcurl doc building again [15]
|
||||||
|
o cmake: tidy-up to use `WORKING_DIRECTORY` [23]
|
||||||
|
o cmake: use namespaced custom target names [80]
|
||||||
|
o cmdline-docs: fix make install with configure --disable-docs [1]
|
||||||
|
o configure: error on missing perl if docs or manual is enabled [135]
|
||||||
|
o configure: make --disable-docs imply --disable-manual [2]
|
||||||
|
o content_encoding: brotli and others, pass through 0-length writes [5]
|
||||||
|
o content_encoding: ignore duplicate chunked encoding [137]
|
||||||
|
o content_encoding: reject transfer-encoding after chunked [200]
|
||||||
|
o contrithanks: honor `CURLWWW` variable [69]
|
||||||
|
o curl-confopts.m4: define CARES_NO_DEPRECATED when c-ares is used [17]
|
||||||
|
o curl.h: change CURL_SSLVERSION_* from enum to defines [132]
|
||||||
|
o curl: make --help adapt to the terminal width [11]
|
||||||
|
o curl: use curl_getenv instead of the curlx_ version [20]
|
||||||
|
o Curl_creader_read: init two variables to avoid using them uninited [99]
|
||||||
|
o curl_easy_pause.md: use correct defines in example [187]
|
||||||
|
o curl_getdate.md: document two-digit year handling [127]
|
||||||
|
o curl_global_trace.md: shorten the description [29]
|
||||||
|
o curl_multibyte: remove access() function wrapper for Windows [163]
|
||||||
|
o curl_path: make Curl_get_pathname use dynbuf [158]
|
||||||
|
o curl_setup.h: add support for IAR compiler [191]
|
||||||
|
o curl_setup.h: detect 'inline' support [133]
|
||||||
|
o curl_sha512_256: do not use workaround for NetBSD when not needed [21]
|
||||||
|
o curl_sha512_256: fix detection of OpenSSL 1.1.1 or later [8]
|
||||||
|
o curl_url_get.md: clarify queries and fragments and CURLU_GET_EMPTY [105]
|
||||||
|
o CURLINFO_REQUEST_SIZE: fixed, add tests for transfer infos reported [52]
|
||||||
|
o CURLOPT_WRITEFUNCTION.md: fix the callback proto in the example [215]
|
||||||
|
o cw-out: improved error handling [104]
|
||||||
|
o DEPRECATE.md: TLS libraries without 1.3 support [199]
|
||||||
|
o digest: replace strcpy for empty string with simple assignment [185]
|
||||||
|
o dist: `set -eu`, fix shellcheck, make reproducible and smaller tarballs [38]
|
||||||
|
o dist: add files missing from release tarball [53]
|
||||||
|
o dist: add reproducible dir entries to tarballs [56]
|
||||||
|
o dist: do not require Perl in `maketgz` [71]
|
||||||
|
o dist: remove the curl-config.1 from the tarball [28]
|
||||||
|
o dist: verify tarball reproducibility in CI [40]
|
||||||
|
o DISTROS: add patch and issues link for curl-for-win [110]
|
||||||
|
o DISTROS: Cygwin updates [44]
|
||||||
|
o dllmain: Call OpenSSL thread cleanup for Windows and Cygwin [114]
|
||||||
|
o doc: pytest `--repeat` -> `--count` [58]
|
||||||
|
o docs/cmdline-opts: invoke managen using a relative path [30]
|
||||||
|
o docs/cmdline-opts: mention STARTTLS for --ssl and --ssl-reqd [175]
|
||||||
|
o docs: add CURLOPT_NOPROGRESS to CURLOPT_XFERINFOFUNCTION example [61]
|
||||||
|
o docs: clarify CURLOPT_MAXFILESIZE and CURLOPT_MAXFILESIZE_LARGE [74]
|
||||||
|
o docs: fix some CURLINFO examples [147]
|
||||||
|
o doh: fix typo in comment [173]
|
||||||
|
o doh: remove unused function prototype [169]
|
||||||
|
o dynbuf: fix returncode on memory error [174]
|
||||||
|
o examples: fix/silence `-Wsign-conversion` [178]
|
||||||
|
o EXPERIMENTAL: add graduation requirements for each feature [166]
|
||||||
|
o file: remove useless assignment [89]
|
||||||
|
o ftp: add tracing support [181]
|
||||||
|
o ftp: fix build for CURL_DISABLE_VERBOSE_STRINGS
|
||||||
|
o ftp: fix socket leak on rare error [102]
|
||||||
|
o GHA: add NetBSD, OpenBSD, FreeBSD/arm64 and OmniOS jobs [201]
|
||||||
|
o GHA: add shellcheck job and fix warnings, shell tidy-ups [70]
|
||||||
|
o GHA: add valgrind to a wolfSSL build [37]
|
||||||
|
o GHA: on macOS remove $HOME/.curlrc [50]
|
||||||
|
o GHA: pin dependencies [194]
|
||||||
|
o gnutls: lazy init the trust settings [75]
|
||||||
|
o h3/ngtcp2: improve error handling [140]
|
||||||
|
o hash: change 'slots' to size_t from int [144]
|
||||||
|
o hash: delete unused debug function [198]
|
||||||
|
o hsts: explicitly skip blank lines [212]
|
||||||
|
o hsts: remove single-use single-line function [151]
|
||||||
|
o http tests: in CI skip test_02_23* for quiche [211]
|
||||||
|
o http2 + ngtcp2: pass CURLcode errors from callbacks [94]
|
||||||
|
o http2, http3: decouple stream state from easy handle [92]
|
||||||
|
o http2: emit RST when client write fails [65]
|
||||||
|
o http3: quiche+ngtcp2 improvements [129]
|
||||||
|
o http: acknowledge a returned error code [123]
|
||||||
|
o http: HEAD response body tolerance [170]
|
||||||
|
o http: reject HTTP major version switch mid connection [100]
|
||||||
|
o http: remove redundant check [182]
|
||||||
|
o http: with chunked POST forced, disable length check on read callback [31]
|
||||||
|
o http_aws_sigv4: remove useless assignment [88]
|
||||||
|
o idn: make Curl_idnconvert_hostname() use Curl_idn_decode() [16]
|
||||||
|
o if2ip: make the buf_size arg a size_t [142]
|
||||||
|
o INSTALL-CMAKE.md: explain `cmake -G <generator-name>` [32]
|
||||||
|
o krb5: use dynbuf [149]
|
||||||
|
o ldap: fix unused variables (seen on OmniOS) [183]
|
||||||
|
o lib/cf-h1-proxy: silence compiler warnings (gcc 14) [155]
|
||||||
|
o lib: add trace support for client reads and writes [45]
|
||||||
|
o lib: bump hash sizes to `size_t` [153]
|
||||||
|
o lib: clear the easy handle's saved errno before transfer [180]
|
||||||
|
o lib: fix compiler warnings (gcc) [222]
|
||||||
|
o lib: make protocol handlers store scheme name lowercase [159]
|
||||||
|
o lib: merge `ENABLE_QUIC` C macro into `USE_HTTP3` [84]
|
||||||
|
o lib: remove two instances of "only only" messages [160]
|
||||||
|
o lib: silence `-Wsign-conversion` in base64, strcase, mprintf [139]
|
||||||
|
o lib: silence warnings on comma misuse [91]
|
||||||
|
o lib: use `#error` instead of invalid syntax in `curl_setup_once.h` [49]
|
||||||
|
o lib: use multi instead of multi_easy for the active multi [41]
|
||||||
|
o libcurl-opts: mention pipelining less [33]
|
||||||
|
o libssh2: delete redundant feature guard [171]
|
||||||
|
o libssh2: replace `access()` with `stat()` [145]
|
||||||
|
o libssh2: set length to 0 if strdup failed [6]
|
||||||
|
o m4: fix rustls pkg-config codepath [22]
|
||||||
|
o MAIL-ETIQUETTE: convert to markdown [12]
|
||||||
|
o makefile: remove the sorting from the vc-ide action [42]
|
||||||
|
o maketgz: put docs/RELEASE-TOOL.md into the tarball [35]
|
||||||
|
o managen: fix the option sort order [150]
|
||||||
|
o mbedtls: call mbedtls_ssl_setup() after RNG callback is set [66]
|
||||||
|
o mbedtls: cut off trailing newlines from debug logs [87]
|
||||||
|
o mbedtls: fix building with v3 in CMake Unity mode [107]
|
||||||
|
o mbedtls: support TLS 1.3 [156]
|
||||||
|
o mime: avoid using access() [125]
|
||||||
|
o misc: fix typos [62]
|
||||||
|
o misc: fix typos, quoting and spelling [167]
|
||||||
|
o mprintf: check fputc error rather than matching returned character [82]
|
||||||
|
o mqtt: when Curl_xfer_recv returns error, don't use nread [101]
|
||||||
|
o multi: avoid memory-leak risk [134]
|
||||||
|
o multi: introduce SETUP state for better timeouts [26]
|
||||||
|
o multi: multi_wait improvements [131]
|
||||||
|
o multi: remove the unused Curl_preconnect function [98]
|
||||||
|
o multi: remove useless assignment [146]
|
||||||
|
o multi: timeout handles even without connection [81]
|
||||||
|
o openldap: create ldap URLs correctly for IPv6 addresses [19]
|
||||||
|
o openssl: do not set SSL_MODE_RELEASE_BUFFERS [10]
|
||||||
|
o openssl: revert keylog_callback support for LibreSSL [192]
|
||||||
|
o OS400: fix shellcheck warnings in scripts [72]
|
||||||
|
o projects: drop MSVC project files for recent versions [79]
|
||||||
|
o pytest: add DELETE tests, check server version [225]
|
||||||
|
o pytest: fixes for recent python, add FTP tests [206]
|
||||||
|
o quic: fixup duplicate static function name (for cmake unity) [77]
|
||||||
|
o quiche: expire all active transfers on connection close [116]
|
||||||
|
o quiche: trust its timeout handling [190]
|
||||||
|
o RELEASE-PROCEDURE: mention an initial working build [7]
|
||||||
|
o request: make Curl_req_init return void [96]
|
||||||
|
o request: paused upload on completed download, assess connection [54]
|
||||||
|
o reuse: add copyright + license info to individual docs/*.md files [13]
|
||||||
|
o ROADMAP: remove completed entries, mention websocket
|
||||||
|
o rustls: fix handshake done handling [207]
|
||||||
|
o rustls: fix partial send handling [224]
|
||||||
|
o rustls: remove incorrect SSLSUPP_TLS13_CIPHERSUITES flag [115]
|
||||||
|
o rustsls: fix error code on receive [230]
|
||||||
|
o sendf: fix two typos in comments [90]
|
||||||
|
o sendf: useless assignment in cr_lc_read() [120]
|
||||||
|
o setopt: acknowledge errors proper for CURLOPT_COOKIEJAR [216]
|
||||||
|
o setopt: make the setstropt_userpwd args compulsory [221]
|
||||||
|
o setopt: remove check for 'option' that is always true [219]
|
||||||
|
o setopt: warn on Curl_set*opt() uses not using the return value [176]
|
||||||
|
o smtp: result of Curl_bufq_cread was not used [78]
|
||||||
|
o socket: remove redundant call to getsockname [195]
|
||||||
|
o socketpair: fix compilation when USE_UNIX_SOCKETS is not defined [229]
|
||||||
|
o src: tidy up types, add necessary casts [217]
|
||||||
|
o telnet: check return code from fileno() [112]
|
||||||
|
o tests/http: fix compiler warning [39]
|
||||||
|
o tests: add -q as first option when invoking curl for tests [97]
|
||||||
|
o tests: check caddy server version to match test expectations [106]
|
||||||
|
o tests: enable test 1117 for hyper [119]
|
||||||
|
o tests: fix feature case in test1481 [117]
|
||||||
|
o tests: fix test 1167 to skip digit-only symbols [214]
|
||||||
|
o tests: make the unit test result type `CURLcode` [165]
|
||||||
|
o tests: Mark tftpd timer function as noreturn [168]
|
||||||
|
o tests: tidy up types in server code [220]
|
||||||
|
o tls: fix SecureTransport + BearSSL cmake unity builds [113]
|
||||||
|
o tls: remove EXAMPLEs from deprecated options [164]
|
||||||
|
o tls: use shared init code for TCP+QUIC [57]
|
||||||
|
o tool: move tool_ftruncate64 to tool_util.c [138]
|
||||||
|
o tool_cb_rea: limit rate unpause for -T . uploads [136]
|
||||||
|
o tool_cfgable: free {proxy_}cipher13_list on exit [172]
|
||||||
|
o tool_getparam: output warning for leading unicode quote character [14]
|
||||||
|
o tool_getparam: remove two redundant conditions [189]
|
||||||
|
o tool_operate: don't truncate the etag save file by default [118]
|
||||||
|
o tool_operate: init vars unconditionally in post_per_transfer [124]
|
||||||
|
o tool_paramhlp: remove duplicate assign [121]
|
||||||
|
o tool_xattr: "guess" URL scheme if none is provided [3]
|
||||||
|
o tool_xattr: in debug builds, act normally if CURL_FAKE_XATTR is not set [4]
|
||||||
|
o transfer: remove useless assignment [122]
|
||||||
|
o url: do not URL decode proxy crendentials [55]
|
||||||
|
o url: fix use of an uninitialized variable [86]
|
||||||
|
o url: make parse_login_details use memdup0 [184]
|
||||||
|
o url: remove duplicate call to Curl_conncache_remove_conn when pruning [196]
|
||||||
|
o urlapi: allow setting port number zero [76]
|
||||||
|
o urlapi: fix relative redirects to fragment-only [83]
|
||||||
|
o urldata: remove fields not used depending on used features [46]
|
||||||
|
o vauth: make two functions void that always just returned OK [218]
|
||||||
|
o version: use msnprintf instead of strncpy [157]
|
||||||
|
o vquic-tls: use correct cert name check API for wolfSSL [226]
|
||||||
|
o vquic: use CURL_FORMAT_CURL_OFF_T for 64 bit printf output [18]
|
||||||
|
o vtls: TLS session storage overhaul [130]
|
||||||
|
o wakeup_create: use FD_CLOEXEC/SOCK_CLOEXEC [223]
|
||||||
|
o warnless: delete orphan declarations [209]
|
||||||
|
o websocket: avoid memory leak in error path [148]
|
||||||
|
o winbuild: add ENABLE_WEBSOCKETS option [93]
|
||||||
|
o winbuild: use $(RC) correctly [27]
|
||||||
|
o wolfssl: plug memory leak in wolfssl_connect_step2() [25]
|
||||||
|
o x509asn1: return error on missing OID [208]
|
||||||
|
|
||||||
|
This release includes the following known bugs:
|
||||||
|
|
||||||
|
o see docs/KNOWN_BUGS (https://curl.se/docs/knownbugs.html)
|
||||||
|
|
||||||
|
Planned upcoming removals include:
|
||||||
|
|
||||||
|
o support for space-separated NOPROXY patterns
|
||||||
|
|
||||||
|
See https://curl.se/dev/deprecate.html for details
|
||||||
|
|
||||||
|
This release would not have looked like this without help, code, reports and
|
||||||
|
advice from friends like these:
|
||||||
|
|
||||||
|
Abdullah Alyan, Andrew, Antoine Bollengier, blankie, Brian Inglis,
|
||||||
|
Carlos Henrique Lima Melara, Ch40zz on github, Christian Schmitz, Chris Webb,
|
||||||
|
Colin Leroy-Mira, Dagfinn Ilmari Mannsåker, Dan Fandrich, Daniel Gustafsson,
|
||||||
|
Daniel J. H., Daniel McCarney, Daniel Stenberg, Dmitry Karpov,
|
||||||
|
Emanuele Torre, Evgeny Grin (Karlson2k), Fabian Keil, farazrbx on github,
|
||||||
|
fuzzard, Gisle Vanem, Gonçalo Carvalho, Gusted, hammlee96 on github,
|
||||||
|
Harmen Stoppels, Harry Sintonen, Hongfei Li, Ivan, Jan Macku, Jan Venekamp,
|
||||||
|
Jeff King, Jeroen Ooms, Jérôme Leclercq, Jiwoo Park,
|
||||||
|
Johann Sebastian Schicho, Jonatan Vela, Joseph Chen, Juliusz Sosinowicz,
|
||||||
|
Kailun Qin, kalvdans on github, Keitagit-kun on github, Konstantin Kuzov,
|
||||||
|
kpcyrd on github, Laramie Leavitt, LigH, Lucas Nussbaum,
|
||||||
|
magisterquis on hackerone, Marcel Raad, Matt Jolly, Max Dymond, Mel Zuser,
|
||||||
|
Michael Kaufmann, Michael Litwak, Michał Antoniak, Nathan Moinvaziri,
|
||||||
|
Orgad Shaneh, Patrick Monnerat, Paul Gilmartin, Paul Howarth,
|
||||||
|
Pavel Kropachev, Pavel Pavlov, Philip Heiduck, Rahul Krishna M, RainRat,
|
||||||
|
Ray Satiro, renovate[bot], riastradh on github, Robert Moreton,
|
||||||
|
Sanjay Pujare, Sergey Bronnikov, Sergey Ogryzkov, Sergio Durigan Junior,
|
||||||
|
southernedge on github, Stefan Eissing, Stephen Farrell, Tal Regev,
|
||||||
|
Tatsuhiro Tsujikawa, Tobias Stoeckmann, Toon Claes, Trumeet on github,
|
||||||
|
Trzik on github, Viktor Szakats, zmcx16 on github
|
||||||
|
(85 contributors)
|
||||||
|
|
||||||
|
References to bug reports and discussions on issues:
|
||||||
|
|
||||||
|
[1] = https://curl.se/bug/?i=13198
|
||||||
|
[2] = https://curl.se/bug/?i=13191
|
||||||
|
[3] = https://curl.se/bug/?i=13205
|
||||||
|
[4] = https://curl.se/bug/?i=13220
|
||||||
|
[5] = https://curl.se/bug/?i=13209
|
||||||
|
[6] = https://curl.se/bug/?i=13213
|
||||||
|
[7] = https://curl.se/bug/?i=13216
|
||||||
|
[8] = https://curl.se/bug/?i=13208
|
||||||
|
[9] = https://curl.se/bug/?i=13210
|
||||||
|
[10] = https://curl.se/bug/?i=13203
|
||||||
|
[11] = https://curl.se/bug/?i=13171
|
||||||
|
[12] = https://curl.se/bug/?i=13247
|
||||||
|
[13] = https://curl.se/bug/?i=13245
|
||||||
|
[14] = https://curl.se/bug/?i=13214
|
||||||
|
[15] = https://curl.se/bug/?i=13207
|
||||||
|
[16] = https://curl.se/bug/?i=13236
|
||||||
|
[17] = https://curl.se/bug/?i=13240
|
||||||
|
[18] = https://curl.se/bug/?i=13224
|
||||||
|
[19] = https://curl.se/bug/?i=13228
|
||||||
|
[20] = https://curl.se/bug/?i=13230
|
||||||
|
[21] = https://curl.se/bug/?i=13225
|
||||||
|
[22] = https://curl.se/bug/?i=13200
|
||||||
|
[23] = https://curl.se/bug/?i=13206
|
||||||
|
[24] = https://curl.se/bug/?i=13197
|
||||||
|
[25] = https://curl.se/bug/?i=13272
|
||||||
|
[26] = https://curl.se/bug/?i=13371
|
||||||
|
[27] = https://curl.se/bug/?i=13267
|
||||||
|
[28] = https://curl.se/bug/?i=13268
|
||||||
|
[29] = https://curl.se/bug/?i=13263
|
||||||
|
[30] = https://curl.se/bug/?i=13281
|
||||||
|
[31] = https://curl.se/bug/?i=13229
|
||||||
|
[32] = https://curl.se/bug/?i=13244
|
||||||
|
[33] = https://curl.se/bug/?i=13254
|
||||||
|
[34] = https://curl.se/bug/?i=13135
|
||||||
|
[35] = https://curl.se/bug/?i=13239
|
||||||
|
[36] = https://curl.se/bug/?i=13242
|
||||||
|
[37] = https://curl.se/bug/?i=13274
|
||||||
|
[38] = https://curl.se/bug/?i=13299
|
||||||
|
[39] = https://curl.se/bug/?i=13301
|
||||||
|
[40] = https://curl.se/bug/?i=13327
|
||||||
|
[41] = https://curl.se/bug/?i=12665
|
||||||
|
[42] = https://curl.se/bug/?i=13294
|
||||||
|
[43] = https://curl.se/bug/?i=13290
|
||||||
|
[44] = https://curl.se/bug/?i=13258
|
||||||
|
[45] = https://curl.se/bug/?i=13223
|
||||||
|
[46] = https://curl.se/bug/?i=13188
|
||||||
|
[47] = https://curl.se/bug/?i=13286
|
||||||
|
[48] = https://curl.se/bug/?i=13282
|
||||||
|
[49] = https://curl.se/bug/?i=13287
|
||||||
|
[50] = https://curl.se/bug/?i=13284
|
||||||
|
[51] = https://curl.se/bug/?i=13283
|
||||||
|
[52] = https://curl.se/bug/?i=13269
|
||||||
|
[53] = https://curl.se/bug/?i=13346
|
||||||
|
[54] = https://curl.se/bug/?i=13260
|
||||||
|
[55] = https://curl.se/bug/?i=13265
|
||||||
|
[56] = https://curl.se/bug/?i=13322
|
||||||
|
[57] = https://curl.se/bug/?i=13172
|
||||||
|
[58] = https://curl.se/bug/?i=13218
|
||||||
|
[59] = https://curl.se/bug/?i=13364
|
||||||
|
[60] = https://curl.se/bug/?i=13313
|
||||||
|
[61] = https://curl.se/bug/?i=13348
|
||||||
|
[62] = https://curl.se/bug/?i=13344
|
||||||
|
[63] = https://curl.se/bug/?i=13137
|
||||||
|
[64] = https://curl.se/bug/?i=13264
|
||||||
|
[65] = https://curl.se/bug/?i=13292
|
||||||
|
[66] = https://curl.se/bug/?i=13314
|
||||||
|
[67] = https://curl.se/bug/?i=13249
|
||||||
|
[68] = https://curl.se/bug/?i=13335
|
||||||
|
[69] = https://curl.se/bug/?i=13315
|
||||||
|
[70] = https://curl.se/bug/?i=13307
|
||||||
|
[71] = https://curl.se/bug/?i=13310
|
||||||
|
[72] = https://curl.se/bug/?i=13309
|
||||||
|
[73] = https://curl.se/bug/?i=13368
|
||||||
|
[74] = https://curl.se/bug/?i=13372
|
||||||
|
[75] = https://curl.se/bug/?i=13339
|
||||||
|
[76] = https://curl.se/bug/?i=13427
|
||||||
|
[77] = https://curl.se/bug/?i=13332
|
||||||
|
[78] = https://curl.se/bug/?i=13398
|
||||||
|
[79] = https://curl.se/bug/?i=13311
|
||||||
|
[80] = https://curl.se/bug/?i=13324
|
||||||
|
[81] = https://curl.se/bug/?i=13276
|
||||||
|
[82] = https://curl.se/bug/?i=13367
|
||||||
|
[83] = https://curl.se/bug/?i=13394
|
||||||
|
[84] = https://curl.se/bug/?i=13352
|
||||||
|
[85] = https://curl.se/bug/?i=13349
|
||||||
|
[86] = https://curl.se/bug/?i=13399
|
||||||
|
[87] = https://curl.se/bug/?i=13321
|
||||||
|
[88] = https://curl.se/bug/?i=13426
|
||||||
|
[89] = https://curl.se/bug/?i=13425
|
||||||
|
[90] = https://curl.se/bug/?i=13393
|
||||||
|
[91] = https://curl.se/bug/?i=13392
|
||||||
|
[92] = https://curl.se/bug/?i=13204
|
||||||
|
[93] = https://curl.se/bug/?i=13232
|
||||||
|
[94] = https://curl.se/bug/?i=13411
|
||||||
|
[95] = https://curl.se/bug/?i=13246
|
||||||
|
[96] = https://curl.se/bug/?i=13423
|
||||||
|
[97] = https://curl.se/bug/?i=13387
|
||||||
|
[98] = https://curl.se/bug/?i=13422
|
||||||
|
[99] = https://curl.se/bug/?i=13419
|
||||||
|
[100] = https://curl.se/bug/?i=13421
|
||||||
|
[101] = https://curl.se/bug/?i=13418
|
||||||
|
[102] = https://curl.se/bug/?i=13417
|
||||||
|
[103] = https://curl.se/bug/?i=13442
|
||||||
|
[104] = https://curl.se/bug/?i=13337
|
||||||
|
[105] = https://curl.se/bug/?i=13407
|
||||||
|
[106] = https://curl.se/bug/?i=13405
|
||||||
|
[107] = https://curl.se/bug/?i=13377
|
||||||
|
[108] = https://curl.se/bug/?i=13373
|
||||||
|
[109] = https://curl.se/bug/?i=11922
|
||||||
|
[110] = https://curl.se/bug/?i=13499
|
||||||
|
[111] = https://curl.se/bug/?i=13396
|
||||||
|
[112] = https://curl.se/bug/?i=13457
|
||||||
|
[113] = https://curl.se/bug/?i=13450
|
||||||
|
[114] = https://curl.se/bug/?i=12327
|
||||||
|
[115] = https://curl.se/bug/?i=13452
|
||||||
|
[116] = https://curl.se/bug/?i=13439
|
||||||
|
[117] = https://curl.se/bug/?i=13445
|
||||||
|
[118] = https://curl.se/bug/?i=13432
|
||||||
|
[119] = https://curl.se/bug/?i=13436
|
||||||
|
[120] = https://curl.se/bug/?i=13437
|
||||||
|
[121] = https://curl.se/bug/?i=13433
|
||||||
|
[122] = https://curl.se/bug/?i=13435
|
||||||
|
[123] = https://curl.se/bug/?i=13434
|
||||||
|
[124] = https://curl.se/bug/?i=13430
|
||||||
|
[125] = https://curl.se/bug/?i=13497
|
||||||
|
[126] = https://curl.se/bug/?i=13464
|
||||||
|
[127] = https://curl.se/bug/?i=13494
|
||||||
|
[128] = https://curl.se/bug/?i=13491
|
||||||
|
[129] = https://curl.se/bug/?i=13475
|
||||||
|
[130] = https://curl.se/bug/?i=13386
|
||||||
|
[131] = https://curl.se/bug/?i=13150
|
||||||
|
[132] = https://curl.se/bug/?i=13510
|
||||||
|
[133] = https://curl.se/bug/?i=13355
|
||||||
|
[134] = https://curl.se/bug/?i=13471
|
||||||
|
[135] = https://curl.se/bug/?i=13508
|
||||||
|
[136] = https://curl.se/bug/?i=13174
|
||||||
|
[137] = https://curl.se/bug/?i=13451
|
||||||
|
[138] = https://curl.se/bug/?i=13458
|
||||||
|
[139] = https://curl.se/bug/?i=13467
|
||||||
|
[140] = https://curl.se/bug/?i=13562
|
||||||
|
[141] = https://curl.se/bug/?i=13495
|
||||||
|
[142] = https://curl.se/bug/?i=13505
|
||||||
|
[143] = https://curl.se/bug/?i=13503
|
||||||
|
[144] = https://curl.se/bug/?i=13502
|
||||||
|
[145] = https://curl.se/bug/?i=13498
|
||||||
|
[146] = https://curl.se/bug/?i=13500
|
||||||
|
[147] = https://curl.se/bug/?i=13557
|
||||||
|
[148] = https://curl.se/bug/?i=13602
|
||||||
|
[149] = https://curl.se/bug/?i=13568
|
||||||
|
[150] = https://curl.se/bug/?i=13567
|
||||||
|
[151] = https://curl.se/bug/?i=13604
|
||||||
|
[152] = https://curl.se/bug/?i=13566
|
||||||
|
[153] = https://curl.se/bug/?i=13601
|
||||||
|
[154] = https://curl.se/bug/?i=13554
|
||||||
|
[155] = https://curl.se/bug/?i=13237
|
||||||
|
[156] = https://curl.se/bug/?i=13539
|
||||||
|
[157] = https://curl.se/bug/?i=13549
|
||||||
|
[158] = https://curl.se/bug/?i=13550
|
||||||
|
[159] = https://curl.se/bug/?i=13553
|
||||||
|
[160] = https://curl.se/bug/?i=13551
|
||||||
|
[161] = https://curl.se/bug/?i=13509
|
||||||
|
[162] = https://curl.se/bug/?i=13542
|
||||||
|
[163] = https://curl.se/bug/?i=13529
|
||||||
|
[164] = https://curl.se/bug/?i=13540
|
||||||
|
[165] = https://curl.se/bug/?i=13600
|
||||||
|
[166] = https://curl.se/bug/?i=13541
|
||||||
|
[167] = https://curl.se/bug/?i=13538
|
||||||
|
[168] = https://curl.se/bug/?i=13534
|
||||||
|
[169] = https://curl.se/bug/?i=13536
|
||||||
|
[170] = https://curl.se/bug/?i=13725
|
||||||
|
[171] = https://curl.se/bug/?i=13537
|
||||||
|
[172] = https://curl.se/bug/?i=13531
|
||||||
|
[173] = https://curl.se/bug/?i=13504
|
||||||
|
[174] = https://curl.se/bug/?i=13533
|
||||||
|
[175] = https://curl.se/bug/?i=13590
|
||||||
|
[176] = https://curl.se/bug/?i=13591
|
||||||
|
[177] = https://curl.se/bug/?i=13605
|
||||||
|
[178] = https://curl.se/bug/?i=13501
|
||||||
|
[179] = https://curl.se/bug/?i=13578
|
||||||
|
[180] = https://curl.se/bug/?i=13574
|
||||||
|
[181] = https://curl.se/bug/?i=13580
|
||||||
|
[182] = https://curl.se/bug/?i=13582
|
||||||
|
[183] = https://curl.se/bug/?i=13588
|
||||||
|
[184] = https://curl.se/bug/?i=13584
|
||||||
|
[185] = https://curl.se/bug/?i=13586
|
||||||
|
[186] = https://curl.se/bug/?i=13579
|
||||||
|
[187] = https://curl.se/bug/?i=13664
|
||||||
|
[188] = https://curl.se/bug/?i=13577
|
||||||
|
[189] = https://curl.se/bug/?i=13576
|
||||||
|
[190] = https://curl.se/bug/?i=13581
|
||||||
|
[191] = https://curl.se/bug/?i=13728
|
||||||
|
[192] = https://curl.se/bug/?i=13672
|
||||||
|
[193] = https://curl.se/bug/?i=13654
|
||||||
|
[194] = https://curl.se/bug/?i=13628
|
||||||
|
[195] = https://curl.se/bug/?i=13655
|
||||||
|
[196] = https://curl.se/bug/?i=13710
|
||||||
|
[197] = https://curl.se/bug/?i=13707
|
||||||
|
[198] = https://curl.se/bug/?i=13729
|
||||||
|
[199] = https://curl.se/bug/?i=13544
|
||||||
|
[200] = https://curl.se/bug/?i=13733
|
||||||
|
[201] = https://curl.se/bug/?i=13583
|
||||||
|
[203] = https://curl.se/bug/?i=13697
|
||||||
|
[206] = https://curl.se/bug/?i=13661
|
||||||
|
[207] = https://curl.se/bug/?i=13686
|
||||||
|
[208] = https://curl.se/bug/?i=13684
|
||||||
|
[209] = https://curl.se/bug/?i=13639
|
||||||
|
[210] = https://curl.se/bug/?i=13560
|
||||||
|
[211] = https://curl.se/bug/?i=13638
|
||||||
|
[212] = https://curl.se/bug/?i=13603
|
||||||
|
[213] = https://curl.se/bug/?i=13611
|
||||||
|
[214] = https://curl.se/bug/?i=13634
|
||||||
|
[215] = https://curl.se/bug/?i=13681
|
||||||
|
[216] = https://curl.se/bug/?i=13624
|
||||||
|
[217] = https://curl.se/bug/?i=13614
|
||||||
|
[218] = https://curl.se/bug/?i=13621
|
||||||
|
[219] = https://curl.se/bug/?i=13619
|
||||||
|
[220] = https://curl.se/bug/?i=13610
|
||||||
|
[221] = https://curl.se/bug/?i=13608
|
||||||
|
[222] = https://curl.se/bug/?i=13643
|
||||||
|
[223] = https://curl.se/bug/?i=13618
|
||||||
|
[224] = https://curl.se/bug/?i=13676
|
||||||
|
[225] = https://curl.se/bug/?i=13679
|
||||||
|
[226] = https://curl.se/bug/?i=13487
|
||||||
|
[227] = https://curl.se/bug/?i=13668
|
||||||
|
[229] = https://curl.se/bug/?i=13666
|
||||||
|
[230] = https://curl.se/bug/?i=13670
|
1663
src/dependencies/curl-8.8.0/acinclude.m4
Normal file
File diff suppressed because it is too large
Load diff
1252
src/dependencies/curl-8.8.0/aclocal.m4
vendored
Normal file
File diff suppressed because it is too large
Load diff
348
src/dependencies/curl-8.8.0/compile
Executable file
|
@ -0,0 +1,348 @@
|
||||||
|
#! /bin/sh
|
||||||
|
# Wrapper for compilers which do not understand '-c -o'.
|
||||||
|
|
||||||
|
scriptversion=2018-03-07.03; # UTC
|
||||||
|
|
||||||
|
# Copyright (C) 1999-2021 Free Software Foundation, Inc.
|
||||||
|
# Written by Tom Tromey <tromey@cygnus.com>.
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation; either version 2, or (at your option)
|
||||||
|
# any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
# As a special exception to the GNU General Public License, if you
|
||||||
|
# distribute this file as part of a program that contains a
|
||||||
|
# configuration script generated by Autoconf, you may include it under
|
||||||
|
# the same distribution terms that you use for the rest of that program.
|
||||||
|
|
||||||
|
# This file is maintained in Automake, please report
|
||||||
|
# bugs to <bug-automake@gnu.org> or send patches to
|
||||||
|
# <automake-patches@gnu.org>.
|
||||||
|
|
||||||
|
nl='
|
||||||
|
'
|
||||||
|
|
||||||
|
# We need space, tab and new line, in precisely that order. Quoting is
|
||||||
|
# there to prevent tools from complaining about whitespace usage.
|
||||||
|
IFS=" "" $nl"
|
||||||
|
|
||||||
|
file_conv=
|
||||||
|
|
||||||
|
# func_file_conv build_file lazy
|
||||||
|
# Convert a $build file to $host form and store it in $file
|
||||||
|
# Currently only supports Windows hosts. If the determined conversion
|
||||||
|
# type is listed in (the comma separated) LAZY, no conversion will
|
||||||
|
# take place.
|
||||||
|
func_file_conv ()
|
||||||
|
{
|
||||||
|
file=$1
|
||||||
|
case $file in
|
||||||
|
/ | /[!/]*) # absolute file, and not a UNC file
|
||||||
|
if test -z "$file_conv"; then
|
||||||
|
# lazily determine how to convert abs files
|
||||||
|
case `uname -s` in
|
||||||
|
MINGW*)
|
||||||
|
file_conv=mingw
|
||||||
|
;;
|
||||||
|
CYGWIN* | MSYS*)
|
||||||
|
file_conv=cygwin
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
file_conv=wine
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
case $file_conv/,$2, in
|
||||||
|
*,$file_conv,*)
|
||||||
|
;;
|
||||||
|
mingw/*)
|
||||||
|
file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
|
||||||
|
;;
|
||||||
|
cygwin/* | msys/*)
|
||||||
|
file=`cygpath -m "$file" || echo "$file"`
|
||||||
|
;;
|
||||||
|
wine/*)
|
||||||
|
file=`winepath -w "$file" || echo "$file"`
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
# func_cl_dashL linkdir
|
||||||
|
# Make cl look for libraries in LINKDIR
|
||||||
|
func_cl_dashL ()
|
||||||
|
{
|
||||||
|
func_file_conv "$1"
|
||||||
|
if test -z "$lib_path"; then
|
||||||
|
lib_path=$file
|
||||||
|
else
|
||||||
|
lib_path="$lib_path;$file"
|
||||||
|
fi
|
||||||
|
linker_opts="$linker_opts -LIBPATH:$file"
|
||||||
|
}
|
||||||
|
|
||||||
|
# func_cl_dashl library
|
||||||
|
# Do a library search-path lookup for cl
|
||||||
|
func_cl_dashl ()
|
||||||
|
{
|
||||||
|
lib=$1
|
||||||
|
found=no
|
||||||
|
save_IFS=$IFS
|
||||||
|
IFS=';'
|
||||||
|
for dir in $lib_path $LIB
|
||||||
|
do
|
||||||
|
IFS=$save_IFS
|
||||||
|
if $shared && test -f "$dir/$lib.dll.lib"; then
|
||||||
|
found=yes
|
||||||
|
lib=$dir/$lib.dll.lib
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
if test -f "$dir/$lib.lib"; then
|
||||||
|
found=yes
|
||||||
|
lib=$dir/$lib.lib
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
if test -f "$dir/lib$lib.a"; then
|
||||||
|
found=yes
|
||||||
|
lib=$dir/lib$lib.a
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
IFS=$save_IFS
|
||||||
|
|
||||||
|
if test "$found" != yes; then
|
||||||
|
lib=$lib.lib
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# func_cl_wrapper cl arg...
|
||||||
|
# Adjust compile command to suit cl
|
||||||
|
func_cl_wrapper ()
|
||||||
|
{
|
||||||
|
# Assume a capable shell
|
||||||
|
lib_path=
|
||||||
|
shared=:
|
||||||
|
linker_opts=
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
if test -n "$eat"; then
|
||||||
|
eat=
|
||||||
|
else
|
||||||
|
case $1 in
|
||||||
|
-o)
|
||||||
|
# configure might choose to run compile as 'compile cc -o foo foo.c'.
|
||||||
|
eat=1
|
||||||
|
case $2 in
|
||||||
|
*.o | *.[oO][bB][jJ])
|
||||||
|
func_file_conv "$2"
|
||||||
|
set x "$@" -Fo"$file"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
func_file_conv "$2"
|
||||||
|
set x "$@" -Fe"$file"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
;;
|
||||||
|
-I)
|
||||||
|
eat=1
|
||||||
|
func_file_conv "$2" mingw
|
||||||
|
set x "$@" -I"$file"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-I*)
|
||||||
|
func_file_conv "${1#-I}" mingw
|
||||||
|
set x "$@" -I"$file"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-l)
|
||||||
|
eat=1
|
||||||
|
func_cl_dashl "$2"
|
||||||
|
set x "$@" "$lib"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-l*)
|
||||||
|
func_cl_dashl "${1#-l}"
|
||||||
|
set x "$@" "$lib"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-L)
|
||||||
|
eat=1
|
||||||
|
func_cl_dashL "$2"
|
||||||
|
;;
|
||||||
|
-L*)
|
||||||
|
func_cl_dashL "${1#-L}"
|
||||||
|
;;
|
||||||
|
-static)
|
||||||
|
shared=false
|
||||||
|
;;
|
||||||
|
-Wl,*)
|
||||||
|
arg=${1#-Wl,}
|
||||||
|
save_ifs="$IFS"; IFS=','
|
||||||
|
for flag in $arg; do
|
||||||
|
IFS="$save_ifs"
|
||||||
|
linker_opts="$linker_opts $flag"
|
||||||
|
done
|
||||||
|
IFS="$save_ifs"
|
||||||
|
;;
|
||||||
|
-Xlinker)
|
||||||
|
eat=1
|
||||||
|
linker_opts="$linker_opts $2"
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
set x "$@" "$1"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
|
||||||
|
func_file_conv "$1"
|
||||||
|
set x "$@" -Tp"$file"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
|
||||||
|
func_file_conv "$1" mingw
|
||||||
|
set x "$@" "$file"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set x "$@" "$1"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
if test -n "$linker_opts"; then
|
||||||
|
linker_opts="-link$linker_opts"
|
||||||
|
fi
|
||||||
|
exec "$@" $linker_opts
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
eat=
|
||||||
|
|
||||||
|
case $1 in
|
||||||
|
'')
|
||||||
|
echo "$0: No command. Try '$0 --help' for more information." 1>&2
|
||||||
|
exit 1;
|
||||||
|
;;
|
||||||
|
-h | --h*)
|
||||||
|
cat <<\EOF
|
||||||
|
Usage: compile [--help] [--version] PROGRAM [ARGS]
|
||||||
|
|
||||||
|
Wrapper for compilers which do not understand '-c -o'.
|
||||||
|
Remove '-o dest.o' from ARGS, run PROGRAM with the remaining
|
||||||
|
arguments, and rename the output as expected.
|
||||||
|
|
||||||
|
If you are trying to build a whole package this is not the
|
||||||
|
right script to run: please start by reading the file 'INSTALL'.
|
||||||
|
|
||||||
|
Report bugs to <bug-automake@gnu.org>.
|
||||||
|
EOF
|
||||||
|
exit $?
|
||||||
|
;;
|
||||||
|
-v | --v*)
|
||||||
|
echo "compile $scriptversion"
|
||||||
|
exit $?
|
||||||
|
;;
|
||||||
|
cl | *[/\\]cl | cl.exe | *[/\\]cl.exe | \
|
||||||
|
icl | *[/\\]icl | icl.exe | *[/\\]icl.exe )
|
||||||
|
func_cl_wrapper "$@" # Doesn't return...
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
ofile=
|
||||||
|
cfile=
|
||||||
|
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
if test -n "$eat"; then
|
||||||
|
eat=
|
||||||
|
else
|
||||||
|
case $1 in
|
||||||
|
-o)
|
||||||
|
# configure might choose to run compile as 'compile cc -o foo foo.c'.
|
||||||
|
# So we strip '-o arg' only if arg is an object.
|
||||||
|
eat=1
|
||||||
|
case $2 in
|
||||||
|
*.o | *.obj)
|
||||||
|
ofile=$2
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set x "$@" -o "$2"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
;;
|
||||||
|
*.c)
|
||||||
|
cfile=$1
|
||||||
|
set x "$@" "$1"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set x "$@" "$1"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
|
||||||
|
if test -z "$ofile" || test -z "$cfile"; then
|
||||||
|
# If no '-o' option was seen then we might have been invoked from a
|
||||||
|
# pattern rule where we don't need one. That is ok -- this is a
|
||||||
|
# normal compilation that the losing compiler can handle. If no
|
||||||
|
# '.c' file was seen then we are probably linking. That is also
|
||||||
|
# ok.
|
||||||
|
exec "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Name of file we expect compiler to create.
|
||||||
|
cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
|
||||||
|
|
||||||
|
# Create the lock directory.
|
||||||
|
# Note: use '[/\\:.-]' here to ensure that we don't use the same name
|
||||||
|
# that we are using for the .o file. Also, base the name on the expected
|
||||||
|
# object file name, since that is what matters with a parallel build.
|
||||||
|
lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
|
||||||
|
while true; do
|
||||||
|
if mkdir "$lockdir" >/dev/null 2>&1; then
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
# FIXME: race condition here if user kills between mkdir and trap.
|
||||||
|
trap "rmdir '$lockdir'; exit 1" 1 2 15
|
||||||
|
|
||||||
|
# Run the compile.
|
||||||
|
"$@"
|
||||||
|
ret=$?
|
||||||
|
|
||||||
|
if test -f "$cofile"; then
|
||||||
|
test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
|
||||||
|
elif test -f "${cofile}bj"; then
|
||||||
|
test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
|
||||||
|
fi
|
||||||
|
|
||||||
|
rmdir "$lockdir"
|
||||||
|
exit $ret
|
||||||
|
|
||||||
|
# Local Variables:
|
||||||
|
# mode: shell-script
|
||||||
|
# sh-indentation: 2
|
||||||
|
# eval: (add-hook 'before-save-hook 'time-stamp)
|
||||||
|
# time-stamp-start: "scriptversion="
|
||||||
|
# time-stamp-format: "%:y-%02m-%02d.%02H"
|
||||||
|
# time-stamp-time-zone: "UTC0"
|
||||||
|
# time-stamp-end: "; # UTC"
|
||||||
|
# End:
|
1754
src/dependencies/curl-8.8.0/config.guess
vendored
Executable file
File diff suppressed because it is too large
Load diff
1890
src/dependencies/curl-8.8.0/config.sub
vendored
Executable file
File diff suppressed because it is too large
Load diff
49133
src/dependencies/curl-8.8.0/configure
vendored
Executable file
File diff suppressed because it is too large
Load diff
5051
src/dependencies/curl-8.8.0/configure.ac
Normal file
File diff suppressed because it is too large
Load diff
193
src/dependencies/curl-8.8.0/curl-config.in
Normal file
|
@ -0,0 +1,193 @@
|
||||||
|
#!/bin/sh
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
|
||||||
|
prefix="@prefix@"
|
||||||
|
# Used in @libdir@
|
||||||
|
# shellcheck disable=SC2034
|
||||||
|
exec_prefix=@exec_prefix@
|
||||||
|
# shellcheck disable=SC2034
|
||||||
|
includedir=@includedir@
|
||||||
|
cppflag_curl_staticlib=@CPPFLAG_CURL_STATICLIB@
|
||||||
|
|
||||||
|
usage()
|
||||||
|
{
|
||||||
|
cat <<EOF
|
||||||
|
Usage: curl-config [OPTION]
|
||||||
|
|
||||||
|
Available values for OPTION include:
|
||||||
|
|
||||||
|
--built-shared says 'yes' if libcurl was built shared
|
||||||
|
--ca CA bundle install path
|
||||||
|
--cc compiler
|
||||||
|
--cflags preprocessor and compiler flags
|
||||||
|
--checkfor [version] check for (lib)curl of the specified version
|
||||||
|
--configure the arguments given to configure when building curl
|
||||||
|
--features newline separated list of enabled features
|
||||||
|
--help display this help and exit
|
||||||
|
--libs library linking information
|
||||||
|
--prefix curl install prefix
|
||||||
|
--protocols newline separated list of enabled protocols
|
||||||
|
--ssl-backends output the SSL backends libcurl was built to support
|
||||||
|
--static-libs static libcurl library linking information
|
||||||
|
--version output version information
|
||||||
|
--vernum output version as a hexadecimal number
|
||||||
|
EOF
|
||||||
|
|
||||||
|
exit "$1"
|
||||||
|
}
|
||||||
|
|
||||||
|
if test "$#" -eq 0; then
|
||||||
|
usage 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
while test "$#" -gt 0; do
|
||||||
|
case "$1" in
|
||||||
|
--built-shared)
|
||||||
|
echo '@ENABLE_SHARED@'
|
||||||
|
;;
|
||||||
|
|
||||||
|
--ca)
|
||||||
|
echo '@CURL_CA_BUNDLE@'
|
||||||
|
;;
|
||||||
|
|
||||||
|
--cc)
|
||||||
|
echo '@CC@'
|
||||||
|
;;
|
||||||
|
|
||||||
|
--prefix)
|
||||||
|
echo "$prefix"
|
||||||
|
;;
|
||||||
|
|
||||||
|
--feature|--features)
|
||||||
|
for feature in @SUPPORT_FEATURES@ ""; do
|
||||||
|
test -n "$feature" && echo "$feature"
|
||||||
|
done
|
||||||
|
;;
|
||||||
|
|
||||||
|
--protocols)
|
||||||
|
# shellcheck disable=SC2043
|
||||||
|
for protocol in @SUPPORT_PROTOCOLS@; do
|
||||||
|
echo "$protocol"
|
||||||
|
done
|
||||||
|
;;
|
||||||
|
|
||||||
|
--version)
|
||||||
|
echo 'libcurl @CURLVERSION@'
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
|
||||||
|
--checkfor)
|
||||||
|
checkfor=$2
|
||||||
|
cmajor=$(echo "$checkfor" | cut -d. -f1)
|
||||||
|
cminor=$(echo "$checkfor" | cut -d. -f2)
|
||||||
|
# when extracting the patch part we strip off everything after a
|
||||||
|
# dash as that's used for things like version 1.2.3-pre1
|
||||||
|
cpatch=$(echo "$checkfor" | cut -d. -f3 | cut -d- -f1)
|
||||||
|
|
||||||
|
vmajor=$(echo '@CURLVERSION@' | cut -d. -f1)
|
||||||
|
vminor=$(echo '@CURLVERSION@' | cut -d. -f2)
|
||||||
|
# when extracting the patch part we strip off everything after a
|
||||||
|
# dash as that's used for things like version 1.2.3-pre1
|
||||||
|
vpatch=$(echo '@CURLVERSION@' | cut -d. -f3 | cut -d- -f1)
|
||||||
|
|
||||||
|
if test "$vmajor" -gt "$cmajor"; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
if test "$vmajor" -eq "$cmajor"; then
|
||||||
|
if test "$vminor" -gt "$cminor"; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
if test "$vminor" -eq "$cminor"; then
|
||||||
|
if test "$cpatch" -le "$vpatch"; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "requested version $checkfor is newer than existing @CURLVERSION@"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
|
||||||
|
--vernum)
|
||||||
|
echo '@VERSIONNUM@'
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
|
||||||
|
--help)
|
||||||
|
usage 0
|
||||||
|
;;
|
||||||
|
|
||||||
|
--cflags)
|
||||||
|
if test "X$cppflag_curl_staticlib" = "X-DCURL_STATICLIB"; then
|
||||||
|
CPPFLAG_CURL_STATICLIB="-DCURL_STATICLIB "
|
||||||
|
else
|
||||||
|
CPPFLAG_CURL_STATICLIB=""
|
||||||
|
fi
|
||||||
|
if test "X@includedir@" = "X/usr/include"; then
|
||||||
|
echo "${CPPFLAG_CURL_STATICLIB}"
|
||||||
|
else
|
||||||
|
echo "${CPPFLAG_CURL_STATICLIB}-I@includedir@"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
|
||||||
|
--libs)
|
||||||
|
if test "X@libdir@" != "X/usr/lib" -a "X@libdir@" != "X/usr/lib64"; then
|
||||||
|
CURLLIBDIR="-L@libdir@ "
|
||||||
|
else
|
||||||
|
CURLLIBDIR=""
|
||||||
|
fi
|
||||||
|
if test "X@ENABLE_SHARED@" = "Xno"; then
|
||||||
|
echo "${CURLLIBDIR}-lcurl @LIBCURL_LIBS@"
|
||||||
|
else
|
||||||
|
echo "${CURLLIBDIR}-lcurl"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
|
||||||
|
--ssl-backends)
|
||||||
|
echo '@SSL_BACKENDS@'
|
||||||
|
;;
|
||||||
|
|
||||||
|
--static-libs)
|
||||||
|
if test "X@ENABLE_STATIC@" != "Xno" ; then
|
||||||
|
echo "@libdir@/libcurl.@libext@" @LDFLAGS@ @LIBCURL_LIBS@
|
||||||
|
else
|
||||||
|
echo 'curl was built with static libraries disabled' >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
|
||||||
|
--configure)
|
||||||
|
echo @CONFIGURE_OPTIONS@
|
||||||
|
;;
|
||||||
|
|
||||||
|
*)
|
||||||
|
echo "unknown option: $1"
|
||||||
|
usage 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
|
||||||
|
exit 0
|
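
A brief usage sketch for the generated curl-config script (program and file names are illustrative only):

    # compile and link a program against this libcurl
    cc -o fetch fetch.c $(curl-config --cflags) $(curl-config --libs)

    # exits with status 0 when the installed libcurl is at least the given version
    curl-config --checkfor 7.70.0 && echo "libcurl is new enough"
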
791
src/dependencies/curl-8.8.0/depcomp
Executable file
|
@ -0,0 +1,791 @@
|
||||||
|
#! /bin/sh
|
||||||
|
# depcomp - compile a program generating dependencies as side-effects
|
||||||
|
|
||||||
|
scriptversion=2018-03-07.03; # UTC
|
||||||
|
|
||||||
|
# Copyright (C) 1999-2021 Free Software Foundation, Inc.
|
||||||
|
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation; either version 2, or (at your option)
|
||||||
|
# any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
# As a special exception to the GNU General Public License, if you
|
||||||
|
# distribute this file as part of a program that contains a
|
||||||
|
# configuration script generated by Autoconf, you may include it under
|
||||||
|
# the same distribution terms that you use for the rest of that program.
|
||||||
|
|
||||||
|
# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
|
||||||
|
|
||||||
|
case $1 in
|
||||||
|
'')
|
||||||
|
echo "$0: No command. Try '$0 --help' for more information." 1>&2
|
||||||
|
exit 1;
|
||||||
|
;;
|
||||||
|
-h | --h*)
|
||||||
|
cat <<\EOF
|
||||||
|
Usage: depcomp [--help] [--version] PROGRAM [ARGS]
|
||||||
|
|
||||||
|
Run PROGRAMS ARGS to compile a file, generating dependencies
|
||||||
|
as side-effects.
|
||||||
|
|
||||||
|
Environment variables:
|
||||||
|
depmode Dependency tracking mode.
|
||||||
|
source Source file read by 'PROGRAMS ARGS'.
|
||||||
|
object Object file output by 'PROGRAMS ARGS'.
|
||||||
|
DEPDIR directory where to store dependencies.
|
||||||
|
depfile Dependency file to output.
|
||||||
|
tmpdepfile Temporary file to use when outputting dependencies.
|
||||||
|
libtool Whether libtool is used (yes/no).
|
||||||
|
|
||||||
|
Report bugs to <bug-automake@gnu.org>.
|
||||||
|
EOF
|
||||||
|
exit $?
|
||||||
|
;;
|
||||||
|
-v | --v*)
|
||||||
|
echo "depcomp $scriptversion"
|
||||||
|
exit $?
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# Get the directory component of the given path, and save it in the
|
||||||
|
# global variables '$dir'. Note that this directory component will
|
||||||
|
# be either empty or ending with a '/' character. This is deliberate.
|
||||||
|
set_dir_from ()
|
||||||
|
{
|
||||||
|
case $1 in
|
||||||
|
*/*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;;
|
||||||
|
*) dir=;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
# Get the suffix-stripped basename of the given path, and save it the
|
||||||
|
# global variable '$base'.
|
||||||
|
set_base_from ()
|
||||||
|
{
|
||||||
|
base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'`
|
||||||
|
}
|
||||||
|
|
||||||
|
# If no dependency file was actually created by the compiler invocation,
|
||||||
|
# we still have to create a dummy depfile, to avoid errors with the
|
||||||
|
# Makefile "include basename.Plo" scheme.
|
||||||
|
make_dummy_depfile ()
|
||||||
|
{
|
||||||
|
echo "#dummy" > "$depfile"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Factor out some common post-processing of the generated depfile.
|
||||||
|
# Requires the auxiliary global variable '$tmpdepfile' to be set.
|
||||||
|
aix_post_process_depfile ()
|
||||||
|
{
|
||||||
|
# If the compiler actually managed to produce a dependency file,
|
||||||
|
# post-process it.
|
||||||
|
if test -f "$tmpdepfile"; then
|
||||||
|
# Each line is of the form 'foo.o: dependency.h'.
|
||||||
|
# Do two passes, one to just change these to
|
||||||
|
# $object: dependency.h
|
||||||
|
# and one to simply output
|
||||||
|
# dependency.h:
|
||||||
|
# which is needed to avoid the deleted-header problem.
|
||||||
|
{ sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile"
|
||||||
|
sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile"
|
||||||
|
} > "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
else
|
||||||
|
make_dummy_depfile
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# A tabulation character.
|
||||||
|
tab=' '
|
||||||
|
# A newline character.
|
||||||
|
nl='
|
||||||
|
'
|
||||||
|
# Character ranges might be problematic outside the C locale.
|
||||||
|
# These definitions help.
|
||||||
|
upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||||
|
lower=abcdefghijklmnopqrstuvwxyz
|
||||||
|
digits=0123456789
|
||||||
|
alpha=${upper}${lower}
|
||||||
|
|
||||||
|
if test -z "$depmode" || test -z "$source" || test -z "$object"; then
|
||||||
|
echo "depcomp: Variables source, object and depmode must be set" 1>&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
|
||||||
|
depfile=${depfile-`echo "$object" |
|
||||||
|
sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
|
||||||
|
tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
|
||||||
|
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
|
||||||
|
# Avoid interferences from the environment.
|
||||||
|
gccflag= dashmflag=
|
||||||
|
|
||||||
|
# Some modes work just like other modes, but use different flags. We
|
||||||
|
# parameterize here, but still list the modes in the big case below,
|
||||||
|
# to make depend.m4 easier to write. Note that we *cannot* use a case
|
||||||
|
# here, because this file can only contain one case statement.
|
||||||
|
if test "$depmode" = hp; then
|
||||||
|
# HP compiler uses -M and no extra arg.
|
||||||
|
gccflag=-M
|
||||||
|
depmode=gcc
|
||||||
|
fi
|
||||||
|
|
||||||
|
if test "$depmode" = dashXmstdout; then
|
||||||
|
# This is just like dashmstdout with a different argument.
|
||||||
|
dashmflag=-xM
|
||||||
|
depmode=dashmstdout
|
||||||
|
fi
|
||||||
|
|
||||||
|
cygpath_u="cygpath -u -f -"
|
||||||
|
if test "$depmode" = msvcmsys; then
|
||||||
|
# This is just like msvisualcpp but w/o cygpath translation.
|
||||||
|
# Just convert the backslash-escaped backslashes to single forward
|
||||||
|
# slashes to satisfy depend.m4
|
||||||
|
cygpath_u='sed s,\\\\,/,g'
|
||||||
|
depmode=msvisualcpp
|
||||||
|
fi
|
||||||
|
|
||||||
|
if test "$depmode" = msvc7msys; then
|
||||||
|
# This is just like msvc7 but w/o cygpath translation.
|
||||||
|
# Just convert the backslash-escaped backslashes to single forward
|
||||||
|
# slashes to satisfy depend.m4
|
||||||
|
cygpath_u='sed s,\\\\,/,g'
|
||||||
|
depmode=msvc7
|
||||||
|
fi
|
||||||
|
|
||||||
|
if test "$depmode" = xlc; then
|
||||||
|
# IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information.
|
||||||
|
gccflag=-qmakedep=gcc,-MF
|
||||||
|
depmode=gcc
|
||||||
|
fi
|
||||||
|
|
||||||
|
case "$depmode" in
|
||||||
|
gcc3)
|
||||||
|
## gcc 3 implements dependency tracking that does exactly what
|
||||||
|
## we want. Yay! Note: for some reason libtool 1.4 doesn't like
|
||||||
|
## it if -MD -MP comes after the -MF stuff. Hmm.
|
||||||
|
## Unfortunately, FreeBSD c89 acceptance of flags depends upon
|
||||||
|
## the command line argument order; so add the flags where they
|
||||||
|
## appear in depend2.am. Note that the slowdown incurred here
|
||||||
|
## affects only configure: in makefiles, %FASTDEP% shortcuts this.
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
case $arg in
|
||||||
|
-c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
|
||||||
|
*) set fnord "$@" "$arg" ;;
|
||||||
|
esac
|
||||||
|
shift # fnord
|
||||||
|
shift # $arg
|
||||||
|
done
|
||||||
|
"$@"
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
mv "$tmpdepfile" "$depfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
gcc)
|
||||||
|
## Note that this doesn't just cater to obsolete pre-3.x GCC compilers,
|
||||||
|
## but also to in-use compilers like IBM xlc/xlC and the HP C compiler.
|
||||||
|
## (see the conditional assignment to $gccflag above).
|
||||||
|
## There are various ways to get dependency output from gcc. Here's
|
||||||
|
## why we pick this rather obscure method:
|
||||||
|
## - Don't want to use -MD because we'd like the dependencies to end
|
||||||
|
## up in a subdir. Having to rename by hand is ugly.
|
||||||
|
## (We might end up doing this anyway to support other compilers.)
|
||||||
|
## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
|
||||||
|
## -MM, not -M (despite what the docs say). Also, it might not be
|
||||||
|
## supported by the other compilers which use the 'gcc' depmode.
|
||||||
|
## - Using -M directly means running the compiler twice (even worse
|
||||||
|
## than renaming).
|
||||||
|
if test -z "$gccflag"; then
|
||||||
|
gccflag=-MD,
|
||||||
|
fi
|
||||||
|
"$@" -Wp,"$gccflag$tmpdepfile"
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
rm -f "$depfile"
|
||||||
|
echo "$object : \\" > "$depfile"
|
||||||
|
# The second -e expression handles DOS-style file names with drive
|
||||||
|
# letters.
|
||||||
|
sed -e 's/^[^:]*: / /' \
|
||||||
|
-e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
|
||||||
|
## This next piece of magic avoids the "deleted header file" problem.
|
||||||
|
## The problem is that when a header file which appears in a .P file
|
||||||
|
## is deleted, the dependency causes make to die (because there is
|
||||||
|
## typically no way to rebuild the header). We avoid this by adding
|
||||||
|
## dummy dependencies for each header file. Too bad gcc doesn't do
|
||||||
|
## this for us directly.
|
||||||
|
## Some versions of gcc put a space before the ':'. On the theory
|
||||||
|
## that the space means something, we add a space to the output as
|
||||||
|
## well. hp depmode also adds that space, but also prefixes the VPATH
|
||||||
|
## to the object. Take care to not repeat it in the output.
|
||||||
|
## Some versions of the HPUX 10.20 sed can't process this invocation
|
||||||
|
## correctly. Breaking it into two sed invocations is a workaround.
|
||||||
|
tr ' ' "$nl" < "$tmpdepfile" \
|
||||||
|
| sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \
|
||||||
|
| sed -e 's/$/ :/' >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
hp)
|
||||||
|
# This case exists only to let depend.m4 do its work. It works by
|
||||||
|
# looking at the text of this script. This case will never be run,
|
||||||
|
# since it is checked for above.
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
|
||||||
|
sgi)
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
"$@" "-Wp,-MDupdate,$tmpdepfile"
|
||||||
|
else
|
||||||
|
"$@" -MDupdate "$tmpdepfile"
|
||||||
|
fi
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
rm -f "$depfile"
|
||||||
|
|
||||||
|
if test -f "$tmpdepfile"; then # yes, the sourcefile depends on other files
|
||||||
|
echo "$object : \\" > "$depfile"
|
||||||
|
# Clip off the initial element (the dependent). Don't try to be
|
||||||
|
# clever and replace this with sed code, as IRIX sed won't handle
|
||||||
|
# lines with more than a fixed number of characters (4096 in
|
||||||
|
# IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
|
||||||
|
# the IRIX cc adds comments like '#:fec' to the end of the
|
||||||
|
# dependency line.
|
||||||
|
tr ' ' "$nl" < "$tmpdepfile" \
|
||||||
|
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \
|
||||||
|
| tr "$nl" ' ' >> "$depfile"
|
||||||
|
echo >> "$depfile"
|
||||||
|
# The second pass generates a dummy entry for each header file.
|
||||||
|
tr ' ' "$nl" < "$tmpdepfile" \
|
||||||
|
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
|
||||||
|
>> "$depfile"
|
||||||
|
else
|
||||||
|
make_dummy_depfile
|
||||||
|
fi
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
xlc)
|
||||||
|
# This case exists only to let depend.m4 do its work. It works by
|
||||||
|
# looking at the text of this script. This case will never be run,
|
||||||
|
# since it is checked for above.
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
|
||||||
|
aix)
|
||||||
|
# The C for AIX Compiler uses -M and outputs the dependencies
|
||||||
|
# in a .u file. In older versions, this file always lives in the
|
||||||
|
# current directory. Also, the AIX compiler puts '$object:' at the
|
||||||
|
# start of each line; $object doesn't have directory information.
|
||||||
|
# Version 6 uses the directory in both cases.
|
||||||
|
set_dir_from "$object"
|
||||||
|
set_base_from "$object"
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
tmpdepfile1=$dir$base.u
|
||||||
|
tmpdepfile2=$base.u
|
||||||
|
tmpdepfile3=$dir.libs/$base.u
|
||||||
|
"$@" -Wc,-M
|
||||||
|
else
|
||||||
|
tmpdepfile1=$dir$base.u
|
||||||
|
tmpdepfile2=$dir$base.u
|
||||||
|
tmpdepfile3=$dir$base.u
|
||||||
|
"$@" -M
|
||||||
|
fi
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
|
||||||
|
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
|
||||||
|
do
|
||||||
|
test -f "$tmpdepfile" && break
|
||||||
|
done
|
||||||
|
aix_post_process_depfile
|
||||||
|
;;
|
||||||
|
|
||||||
|
tcc)
|
||||||
|
# tcc (Tiny C Compiler) understands '-MD -MF file' since version 0.9.26
|
||||||
|
# FIXME: That version was still under development at the moment of writing.
|
||||||
|
# Make sure that this statement remains true also for stable, released
|
||||||
|
# versions.
|
||||||
|
# It will wrap lines (doesn't matter whether long or short) with a
|
||||||
|
# trailing '\', as in:
|
||||||
|
#
|
||||||
|
# foo.o : \
|
||||||
|
# foo.c \
|
||||||
|
# foo.h \
|
||||||
|
#
|
||||||
|
# It will put a trailing '\' even on the last line, and will use leading
|
||||||
|
# spaces rather than leading tabs (at least since its commit 0394caf7
|
||||||
|
# "Emit spaces for -MD").
|
||||||
|
"$@" -MD -MF "$tmpdepfile"
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
rm -f "$depfile"
|
||||||
|
# Each non-empty line is of the form 'foo.o : \' or ' dep.h \'.
|
||||||
|
# We have to change lines of the first kind to '$object: \'.
|
||||||
|
sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile"
|
||||||
|
# And for each line of the second kind, we have to emit a 'dep.h:'
|
||||||
|
# dummy dependency, to avoid the deleted-header problem.
|
||||||
|
sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
## The order of this option in the case statement is important, since the
|
||||||
|
## shell code in configure will try each of these formats in the order
|
||||||
|
## listed in this file. A plain '-MD' option would be understood by many
|
||||||
|
## compilers, so we must ensure this comes after the gcc and icc options.
|
||||||
|
pgcc)
|
||||||
|
# Portland's C compiler understands '-MD'.
|
||||||
|
# Will always output deps to 'file.d' where file is the root name of the
|
||||||
|
# source file under compilation, even if file resides in a subdirectory.
|
||||||
|
# The object file name does not affect the name of the '.d' file.
|
||||||
|
# pgcc 10.2 will output
|
||||||
|
# foo.o: sub/foo.c sub/foo.h
|
||||||
|
# and will wrap long lines using '\' :
|
||||||
|
# foo.o: sub/foo.c ... \
|
||||||
|
# sub/foo.h ... \
|
||||||
|
# ...
|
||||||
|
set_dir_from "$object"
|
||||||
|
# Use the source, not the object, to determine the base name, since
|
||||||
|
# that's sadly what pgcc will do too.
|
||||||
|
set_base_from "$source"
|
||||||
|
tmpdepfile=$base.d
|
||||||
|
|
||||||
|
# For projects that build the same source file twice into different object
|
||||||
|
# files, the pgcc approach of using the *source* file root name can cause
|
||||||
|
# problems in parallel builds. Use a locking strategy to avoid stomping on
|
||||||
|
# the same $tmpdepfile.
|
||||||
|
lockdir=$base.d-lock
|
||||||
|
trap "
|
||||||
|
echo '$0: caught signal, cleaning up...' >&2
|
||||||
|
rmdir '$lockdir'
|
||||||
|
exit 1
|
||||||
|
" 1 2 13 15
|
||||||
|
numtries=100
|
||||||
|
i=$numtries
|
||||||
|
while test $i -gt 0; do
|
||||||
|
# mkdir is a portable test-and-set.
|
||||||
|
if mkdir "$lockdir" 2>/dev/null; then
|
||||||
|
# This process acquired the lock.
|
||||||
|
"$@" -MD
|
||||||
|
stat=$?
|
||||||
|
# Release the lock.
|
||||||
|
rmdir "$lockdir"
|
||||||
|
break
|
||||||
|
else
|
||||||
|
# If the lock is being held by a different process, wait
|
||||||
|
# until the winning process is done or we timeout.
|
||||||
|
while test -d "$lockdir" && test $i -gt 0; do
|
||||||
|
sleep 1
|
||||||
|
i=`expr $i - 1`
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
i=`expr $i - 1`
|
||||||
|
done
|
||||||
|
trap - 1 2 13 15
|
||||||
|
if test $i -le 0; then
|
||||||
|
echo "$0: failed to acquire lock after $numtries attempts" >&2
|
||||||
|
echo "$0: check lockdir '$lockdir'" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
rm -f "$depfile"
|
||||||
|
# Each line is of the form `foo.o: dependent.h',
|
||||||
|
# or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
|
||||||
|
# Do two passes, one to just change these to
|
||||||
|
# `$object: dependent.h' and one to simply `dependent.h:'.
|
||||||
|
sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
|
||||||
|
# Some versions of the HPUX 10.20 sed can't process this invocation
|
||||||
|
# correctly. Breaking it into two sed invocations is a workaround.
|
||||||
|
sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \
|
||||||
|
| sed -e 's/$/ :/' >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
hp2)
|
||||||
|
# The "hp" stanza above does not work with aCC (C++) and HP's ia64
|
||||||
|
# compilers, which have integrated preprocessors. The correct option
|
||||||
|
# to use with these is +Maked; it writes dependencies to a file named
|
||||||
|
# 'foo.d', which lands next to the object file, wherever that
|
||||||
|
# happens to be.
|
||||||
|
# Much of this is similar to the tru64 case; see comments there.
|
||||||
|
set_dir_from "$object"
|
||||||
|
set_base_from "$object"
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
tmpdepfile1=$dir$base.d
|
||||||
|
tmpdepfile2=$dir.libs/$base.d
|
||||||
|
"$@" -Wc,+Maked
|
||||||
|
else
|
||||||
|
tmpdepfile1=$dir$base.d
|
||||||
|
tmpdepfile2=$dir$base.d
|
||||||
|
"$@" +Maked
|
||||||
|
fi
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile1" "$tmpdepfile2"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
|
||||||
|
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
|
||||||
|
do
|
||||||
|
test -f "$tmpdepfile" && break
|
||||||
|
done
|
||||||
|
if test -f "$tmpdepfile"; then
|
||||||
|
sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile"
|
||||||
|
# Add 'dependent.h:' lines.
|
||||||
|
sed -ne '2,${
|
||||||
|
s/^ *//
|
||||||
|
s/ \\*$//
|
||||||
|
s/$/:/
|
||||||
|
p
|
||||||
|
}' "$tmpdepfile" >> "$depfile"
|
||||||
|
else
|
||||||
|
make_dummy_depfile
|
||||||
|
fi
|
||||||
|
rm -f "$tmpdepfile" "$tmpdepfile2"
|
||||||
|
;;
|
||||||
|
|
||||||
|
tru64)
|
||||||
|
# The Tru64 compiler uses -MD to generate dependencies as a side
|
||||||
|
# effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'.
|
||||||
|
# At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
|
||||||
|
# dependencies in 'foo.d' instead, so we check for that too.
|
||||||
|
# Subdirectories are respected.
|
||||||
|
set_dir_from "$object"
|
||||||
|
set_base_from "$object"
|
||||||
|
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
# Libtool generates 2 separate objects for the 2 libraries. These
|
||||||
|
# two compilations output dependencies in $dir.libs/$base.o.d and
|
||||||
|
# in $dir$base.o.d. We have to check for both files, because
|
||||||
|
# one of the two compilations can be disabled. We should prefer
|
||||||
|
# $dir$base.o.d over $dir.libs/$base.o.d because the latter is
|
||||||
|
# automatically cleaned when .libs/ is deleted, while ignoring
|
||||||
|
# the former would cause a distcleancheck panic.
|
||||||
|
tmpdepfile1=$dir$base.o.d # libtool 1.5
|
||||||
|
tmpdepfile2=$dir.libs/$base.o.d # Likewise.
|
||||||
|
tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504
|
||||||
|
"$@" -Wc,-MD
|
||||||
|
else
|
||||||
|
tmpdepfile1=$dir$base.d
|
||||||
|
tmpdepfile2=$dir$base.d
|
||||||
|
tmpdepfile3=$dir$base.d
|
||||||
|
"$@" -MD
|
||||||
|
fi
|
||||||
|
|
||||||
|
stat=$?
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
|
||||||
|
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
|
||||||
|
do
|
||||||
|
test -f "$tmpdepfile" && break
|
||||||
|
done
|
||||||
|
# Same post-processing that is required for AIX mode.
|
||||||
|
aix_post_process_depfile
|
||||||
|
;;
|
||||||
|
|
||||||
|
msvc7)
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
showIncludes=-Wc,-showIncludes
|
||||||
|
else
|
||||||
|
showIncludes=-showIncludes
|
||||||
|
fi
|
||||||
|
"$@" $showIncludes > "$tmpdepfile"
|
||||||
|
stat=$?
|
||||||
|
grep -v '^Note: including file: ' "$tmpdepfile"
|
||||||
|
if test $stat -ne 0; then
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
exit $stat
|
||||||
|
fi
|
||||||
|
rm -f "$depfile"
|
||||||
|
echo "$object : \\" > "$depfile"
|
||||||
|
# The first sed program below extracts the file names and escapes
|
||||||
|
# backslashes for cygpath. The second sed program outputs the file
|
||||||
|
# name when reading, but also accumulates all include files in the
|
||||||
|
# hold buffer in order to output them again at the end. This only
|
||||||
|
# works with sed implementations that can handle large buffers.
|
||||||
|
sed < "$tmpdepfile" -n '
|
||||||
|
/^Note: including file: *\(.*\)/ {
|
||||||
|
s//\1/
|
||||||
|
s/\\/\\\\/g
|
||||||
|
p
|
||||||
|
}' | $cygpath_u | sort -u | sed -n '
|
||||||
|
s/ /\\ /g
|
||||||
|
s/\(.*\)/'"$tab"'\1 \\/p
|
||||||
|
s/.\(.*\) \\/\1:/
|
||||||
|
H
|
||||||
|
$ {
|
||||||
|
s/.*/'"$tab"'/
|
||||||
|
G
|
||||||
|
p
|
||||||
|
}' >> "$depfile"
|
||||||
|
echo >> "$depfile" # make sure the fragment doesn't end with a backslash
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
msvc7msys)
|
||||||
|
# This case exists only to let depend.m4 do its work. It works by
|
||||||
|
# looking at the text of this script. This case will never be run,
|
||||||
|
# since it is checked for above.
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
|
||||||
|
#nosideeffect)
|
||||||
|
# This comment above is used by automake to tell side-effect
|
||||||
|
# dependency tracking mechanisms from slower ones.
|
||||||
|
|
||||||
|
dashmstdout)
|
||||||
|
# Important note: in order to support this mode, a compiler *must*
|
||||||
|
# always write the preprocessed file to stdout, regardless of -o.
|
||||||
|
"$@" || exit $?
|
||||||
|
|
||||||
|
# Remove the call to Libtool.
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
while test "X$1" != 'X--mode=compile'; do
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
shift
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove '-o $object'.
|
||||||
|
IFS=" "
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
case $arg in
|
||||||
|
-o)
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
$object)
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set fnord "$@" "$arg"
|
||||||
|
shift # fnord
|
||||||
|
shift # $arg
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
test -z "$dashmflag" && dashmflag=-M
|
||||||
|
# Require at least two characters before searching for ':'
|
||||||
|
# in the target name. This is to cope with DOS-style filenames:
|
||||||
|
# a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise.
|
||||||
|
"$@" $dashmflag |
|
||||||
|
sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile"
|
||||||
|
rm -f "$depfile"
|
||||||
|
cat < "$tmpdepfile" > "$depfile"
|
||||||
|
# Some versions of the HPUX 10.20 sed can't process this sed invocation
|
||||||
|
# correctly. Breaking it into two sed invocations is a workaround.
|
||||||
|
tr ' ' "$nl" < "$tmpdepfile" \
|
||||||
|
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
|
||||||
|
| sed -e 's/$/ :/' >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
dashXmstdout)
|
||||||
|
# This case only exists to satisfy depend.m4. It is never actually
|
||||||
|
# run, as this mode is specially recognized in the preamble.
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
|
||||||
|
makedepend)
|
||||||
|
"$@" || exit $?
|
||||||
|
# Remove any Libtool call
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
while test "X$1" != 'X--mode=compile'; do
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
shift
|
||||||
|
fi
|
||||||
|
# X makedepend
|
||||||
|
shift
|
||||||
|
cleared=no eat=no
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
case $cleared in
|
||||||
|
no)
|
||||||
|
set ""; shift
|
||||||
|
cleared=yes ;;
|
||||||
|
esac
|
||||||
|
if test $eat = yes; then
|
||||||
|
eat=no
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
case "$arg" in
|
||||||
|
-D*|-I*)
|
||||||
|
set fnord "$@" "$arg"; shift ;;
|
||||||
|
# Strip any option that makedepend may not understand. Remove
|
||||||
|
# the object too, otherwise makedepend will parse it as a source file.
|
||||||
|
-arch)
|
||||||
|
eat=yes ;;
|
||||||
|
-*|$object)
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set fnord "$@" "$arg"; shift ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
obj_suffix=`echo "$object" | sed 's/^.*\././'`
|
||||||
|
touch "$tmpdepfile"
|
||||||
|
${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
|
||||||
|
rm -f "$depfile"
|
||||||
|
# makedepend may prepend the VPATH from the source file name to the object.
|
||||||
|
# No need to regex-escape $object, excess matching of '.' is harmless.
|
||||||
|
sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile"
|
||||||
|
# Some versions of the HPUX 10.20 sed can't process the last invocation
|
||||||
|
# correctly. Breaking it into two sed invocations is a workaround.
|
||||||
|
sed '1,2d' "$tmpdepfile" \
|
||||||
|
| tr ' ' "$nl" \
|
||||||
|
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
|
||||||
|
| sed -e 's/$/ :/' >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile" "$tmpdepfile".bak
|
||||||
|
;;
|
||||||
|
|
||||||
|
cpp)
|
||||||
|
# Important note: in order to support this mode, a compiler *must*
|
||||||
|
# always write the preprocessed file to stdout.
|
||||||
|
"$@" || exit $?
|
||||||
|
|
||||||
|
# Remove the call to Libtool.
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
while test "X$1" != 'X--mode=compile'; do
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
shift
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove '-o $object'.
|
||||||
|
IFS=" "
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
case $arg in
|
||||||
|
-o)
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
$object)
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set fnord "$@" "$arg"
|
||||||
|
shift # fnord
|
||||||
|
shift # $arg
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
"$@" -E \
|
||||||
|
| sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
|
||||||
|
-e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
|
||||||
|
| sed '$ s: \\$::' > "$tmpdepfile"
|
||||||
|
rm -f "$depfile"
|
||||||
|
echo "$object : \\" > "$depfile"
|
||||||
|
cat < "$tmpdepfile" >> "$depfile"
|
||||||
|
sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
msvisualcpp)
|
||||||
|
# Important note: in order to support this mode, a compiler *must*
|
||||||
|
# always write the preprocessed file to stdout.
|
||||||
|
"$@" || exit $?
|
||||||
|
|
||||||
|
# Remove the call to Libtool.
|
||||||
|
if test "$libtool" = yes; then
|
||||||
|
while test "X$1" != 'X--mode=compile'; do
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
shift
|
||||||
|
fi
|
||||||
|
|
||||||
|
IFS=" "
|
||||||
|
for arg
|
||||||
|
do
|
||||||
|
case "$arg" in
|
||||||
|
-o)
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
$object)
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
"-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
|
||||||
|
set fnord "$@"
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
set fnord "$@" "$arg"
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
"$@" -E 2>/dev/null |
|
||||||
|
sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
|
||||||
|
rm -f "$depfile"
|
||||||
|
echo "$object : \\" > "$depfile"
|
||||||
|
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile"
|
||||||
|
echo "$tab" >> "$depfile"
|
||||||
|
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
|
||||||
|
rm -f "$tmpdepfile"
|
||||||
|
;;
|
||||||
|
|
||||||
|
msvcmsys)
|
||||||
|
# This case exists only to let depend.m4 do its work. It works by
|
||||||
|
# looking at the text of this script. This case will never be run,
|
||||||
|
# since it is checked for above.
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
|
||||||
|
none)
|
||||||
|
exec "$@"
|
||||||
|
;;
|
||||||
|
|
||||||
|
*)
|
||||||
|
echo "Unknown depmode $depmode" 1>&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
|
||||||
|
# Local Variables:
|
||||||
|
# mode: shell-script
|
||||||
|
# sh-indentation: 2
|
||||||
|
# eval: (add-hook 'before-save-hook 'time-stamp)
|
||||||
|
# time-stamp-start: "scriptversion="
|
||||||
|
# time-stamp-format: "%:y-%02m-%02d.%02H"
|
||||||
|
# time-stamp-time-zone: "UTC0"
|
||||||
|
# time-stamp-end: "; # UTC"
|
||||||
|
# End:
|
50
src/dependencies/curl-8.8.0/docs/ALTSVC.md
Normal file
50
src/dependencies/curl-8.8.0/docs/ALTSVC.md
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Alt-Svc
|
||||||
|
|
||||||
|
curl features support for the Alt-Svc: HTTP header.
|
||||||
|
|
||||||
|
## Enable Alt-Svc in build
|
||||||
|
|
||||||
|
`./configure --enable-alt-svc`
|
||||||
|
|
||||||
|
(enabled by default since 7.73.0)
|
||||||
|
|
||||||
|
## Standard
|
||||||
|
|
||||||
|
[RFC 7838](https://datatracker.ietf.org/doc/html/rfc7838)
|
||||||
|
|
||||||
|
# Alt-Svc cache file format
|
||||||
|
|
||||||
|
This is a text-based file with one line per entry and each line consists of nine
|
||||||
|
space-separated fields.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
h2 quic.tech 8443 h3-22 quic.tech 8443 "20190808 06:18:37" 0 0
|
||||||
|
|
||||||
|
## Fields
|
||||||
|
|
||||||
|
1. The ALPN id for the source origin
|
||||||
|
2. The hostname for the source origin
|
||||||
|
3. The port number for the source origin
|
||||||
|
4. The ALPN id for the destination host
|
||||||
|
5. The hostname for the destination host
|
||||||
|
6. The port number for the destination host
|
||||||
|
7. The expiration date and time of this entry within double quotes. The date format is "YYYYMMDD HH:MM:SS" and the time zone is GMT.
|
||||||
|
8. Boolean (1 or 0) if "persist" was set for this entry
|
||||||
|
9. Integer priority value (not currently used)
|
||||||
|
|
||||||
|
If the hostname is an IPv6 numerical address, it is stored with brackets such
|
||||||
|
as `[::1]`.
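
A minimal C sketch of reading one such cache line follows. It only illustrates
the nine-field layout described above; it is not the parser libcurl itself
uses, and the struct and function names are made up for the example.

```c
#include <stdio.h>

/* Hypothetical holder for one parsed cache line; buffer sizes are arbitrary. */
struct altsvc_line {
  char src_alpn[16], src_host[256], dst_alpn[16], dst_host[256];
  int src_port, dst_port;
  char expires[32];   /* "YYYYMMDD HH:MM:SS" in GMT, quotes stripped */
  int persist, prio;
};

/* Returns 0 when all nine fields were found, -1 otherwise. */
static int parse_altsvc_line(const char *line, struct altsvc_line *e)
{
  /* The expiry field is double-quoted and contains a space, so it is
     matched with a %[^"] conversion instead of %s. */
  int n = sscanf(line, "%15s %255s %d %15s %255s %d \"%31[^\"]\" %d %d",
                 e->src_alpn, e->src_host, &e->src_port,
                 e->dst_alpn, e->dst_host, &e->dst_port,
                 e->expires, &e->persist, &e->prio);
  return (n == 9) ? 0 : -1;
}
```

Fed the example line above, this yields nine fields with both the persist flag
and the priority set to 0.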
|
||||||
|
|
||||||
|
# TODO
|
||||||
|
|
||||||
|
- handle multiple response headers, when one of them says `clear` (should
|
||||||
|
override them all)
|
||||||
|
- using `Age:` value for caching age as per spec
|
||||||
|
- `CURLALTSVC_IMMEDIATELY` support
|
146
src/dependencies/curl-8.8.0/docs/BINDINGS.md
Normal file
146
src/dependencies/curl-8.8.0/docs/BINDINGS.md
Normal file
|
@ -0,0 +1,146 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
libcurl bindings
|
||||||
|
================
|
||||||
|
|
||||||
|
Creative people have written bindings or interfaces for various environments
|
||||||
|
and programming languages. Using one of these allows you to take advantage of
|
||||||
|
curl powers from within your favourite language or system.
|
||||||
|
|
||||||
|
This is a list of all known interfaces as of this writing.
|
||||||
|
|
||||||
|
The bindings listed below are not part of the curl/libcurl distribution
|
||||||
|
archives, but must be downloaded and installed separately.
|
||||||
|
|
||||||
|
<!-- markdown-link-check-disable -->
|
||||||
|
|
||||||
|
[Ada95](https://web.archive.org/web/20070403105909/www.almroth.com/adacurl/index.html) Written by Andreas Almroth
|
||||||
|
|
||||||
|
[Basic](https://scriptbasic.com/) ScriptBasic bindings written by Peter Verhas
|
||||||
|
|
||||||
|
C++: [curlpp](https://github.com/jpbarrette/curlpp/) Written by Jean-Philippe Barrette-LaPierre,
|
||||||
|
[curlcpp](https://github.com/JosephP91/curlcpp) by Giuseppe Persico and [C++
|
||||||
|
Requests](https://github.com/libcpr/cpr) by Huu Nguyen
|
||||||
|
|
||||||
|
[Ch](https://chcurl.sourceforge.net/) Written by Stephen Nestinger and Jonathan Rogado
|
||||||
|
|
||||||
|
Cocoa: [BBHTTP](https://github.com/biasedbit/BBHTTP) written by Bruno de Carvalho
|
||||||
|
[curlhandle](https://github.com/karelia/curlhandle) Written by Dan Wood
|
||||||
|
|
||||||
|
Clojure: [clj-curl](https://github.com/lsevero/clj-curl) by Lucas Severo
|
||||||
|
|
||||||
|
[D](https://dlang.org/library/std/net/curl.html) Written by Kenneth Bogert
|
||||||
|
|
||||||
|
[Delphi](https://github.com/Mercury13/curl4delphi) Written by Mikhail Merkuryev
|
||||||
|
|
||||||
|
[Dylan](https://dylanlibs.sourceforge.net/) Written by Chris Double
|
||||||
|
|
||||||
|
[Eiffel](https://iron.eiffel.com/repository/20.11/package/ABEF6975-37AC-45FD-9C67-52D10BA0669B) Written by Eiffel Software
|
||||||
|
|
||||||
|
[Euphoria](https://web.archive.org/web/20050204080544/rays-web.com/eulibcurl.htm) Written by Ray Smith
|
||||||
|
|
||||||
|
[Falcon](http://www.falconpl.org/project_docs/curl/)
|
||||||
|
|
||||||
|
[Ferite](https://web.archive.org/web/20150102192018/ferite.org/) Written by Paul Querna
|
||||||
|
|
||||||
|
[Fortran](https://github.com/interkosmos/fortran-curl) Written by Philipp Engel
|
||||||
|
|
||||||
|
[Gambas](https://gambas.sourceforge.net/)
|
||||||
|
|
||||||
|
[glib/GTK+](https://web.archive.org/web/20100526203452/atterer.net/glibcurl) Written by Richard Atterer
|
||||||
|
|
||||||
|
Go: [go-curl](https://github.com/andelf/go-curl) by ShuYu Wang
|
||||||
|
|
||||||
|
[Guile](https://github.com/spk121/guile-curl) Written by Michael L. Gran
|
||||||
|
|
||||||
|
[Harbour](https://github.com/vszakats/hb/tree/main/contrib/hbcurl) Written by Viktor Szakats
|
||||||
|
|
||||||
|
[Haskell](https://hackage.haskell.org/package/curl) Written by Galois, Inc
|
||||||
|
|
||||||
|
[Hollywood](https://www.hollywood-mal.com/download.html) hURL by Andreas Falkenhahn
|
||||||
|
|
||||||
|
[Java](https://github.com/pjlegato/curl-java)
|
||||||
|
|
||||||
|
[Julia](https://github.com/JuliaWeb/LibCURL.jl) Written by Amit Murthy
|
||||||
|
|
||||||
|
[Kapito](https://github.com/puzza007/katipo) is an Erlang HTTP library around libcurl.
|
||||||
|
|
||||||
|
[Lisp](https://common-lisp.net/project/cl-curl/) Written by Liam Healy
|
||||||
|
|
||||||
|
Lua: [luacurl](https://web.archive.org/web/20201205052437/luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](https://github.com/Lua-cURL) by Jürgen Hötzel
|
||||||
|
|
||||||
|
[Mono](https://web.archive.org/web/20070606064500/https://forge.novell.com/modules/xfmod/project/?libcurl-mono) Written by Jeffrey Phillips
|
||||||
|
|
||||||
|
[.NET](https://sourceforge.net/projects/libcurl-net/) libcurl-net by Jeffrey Phillips
|
||||||
|
|
||||||
|
[Nim](https://nimble.directory/pkg/libcurl) wrapper for libcurl
|
||||||
|
|
||||||
|
[node.js](https://github.com/JCMais/node-libcurl) node-libcurl by Jonathan Cardoso Machado
|
||||||
|
|
||||||
|
[Object-Pascal](https://web.archive.org/web/20020610214926/www.tekool.com/opcurl) Free Pascal, Delphi and Kylix binding written by Christophe Espern.
|
||||||
|
|
||||||
|
[OCaml](https://opam.ocaml.org/packages/ocurl/) Written by Lars Nilsson and ygrek
|
||||||
|
|
||||||
|
[Pascal](https://web.archive.org/web/20030804091414/houston.quik.com/jkp/curlpas/) Free Pascal, Delphi and Kylix binding written by Jeffrey Pohlmeyer.
|
||||||
|
|
||||||
|
Perl: [WWW::Curl](https://github.com/szbalint/WWW--Curl) Maintained by Cris
|
||||||
|
Bailiff and Bálint Szilakszi,
|
||||||
|
[perl6-net-curl](https://github.com/azawawi/perl6-net-curl) by Ahmad M. Zawawi
|
||||||
|
[NET::Curl](https://metacpan.org/pod/Net::Curl) by Przemyslaw Iskra
|
||||||
|
|
||||||
|
[PHP](https://php.net/curl) Originally written by Sterling Hughes
|
||||||
|
|
||||||
|
[PostgreSQL](https://github.com/pramsey/pgsql-http) - HTTP client for PostgreSQL
|
||||||
|
|
||||||
|
[PostgreSQL](https://github.com/RekGRpth/pg_curl) - cURL client for PostgreSQL
|
||||||
|
|
||||||
|
[PureBasic](https://www.purebasic.com/documentation/http/index.html) uses libcurl in its "native" HTTP subsystem
|
||||||
|
|
||||||
|
[Python](http://pycurl.io/) PycURL by Kjetil Jacobsen
|
||||||
|
|
||||||
|
[Python](https://pypi.org/project/pymcurl/) mcurl by Ganesh Viswanathan
|
||||||
|
|
||||||
|
[Q](https://q-lang.sourceforge.net/) The libcurl module is part of the default install
|
||||||
|
|
||||||
|
[R](https://cran.r-project.org/package=curl)
|
||||||
|
|
||||||
|
[Rexx](https://rexxcurl.sourceforge.net/) Written by Mark Hessling
|
||||||
|
|
||||||
|
[Ring](https://ring-lang.sourceforge.io/doc1.3/libcurl.html) RingLibCurl by Mahmoud Fayed
|
||||||
|
|
||||||
|
RPG, support for ILE/RPG on OS/400 is included in the source distribution
|
||||||
|
|
||||||
|
Ruby: [curb](https://github.com/taf2/curb) written by Ross Bamford,
|
||||||
|
[ruby-curl-multi](https://github.com/kball/curl_multi.rb) by Kristjan Petursson and Keith Rarick
|
||||||
|
|
||||||
|
[Rust](https://github.com/alexcrichton/curl-rust) curl-rust - by Carl Lerche
|
||||||
|
|
||||||
|
[Scheme](https://www.metapaper.net/lisovsky/web/curl/) Bigloo binding by Kirill Lisovsky
|
||||||
|
|
||||||
|
[Scilab](https://help.scilab.org/docs/current/fr_FR/getURL.html) binding by Sylvestre Ledru
|
||||||
|
|
||||||
|
[S-Lang](https://www.jedsoft.org/slang/modules/curl.html) by John E Davis
|
||||||
|
|
||||||
|
[Smalltalk](https://www.squeaksource.com/CurlPlugin/) Written by Danil Osipchuk
|
||||||
|
|
||||||
|
[SP-Forth](https://sourceforge.net/p/spf/spf/ci/master/tree/devel/~ac/lib/lin/curl/) Written by Andrey Cherezov
|
||||||
|
|
||||||
|
[SPL](https://web.archive.org/web/20210203022158/www.clifford.at/spl/spldoc/curl.html) Written by Clifford Wolf
|
||||||
|
|
||||||
|
[Tcl](https://web.archive.org/web/20160826011806/mirror.yellow5.com/tclcurl/) Tclcurl by Andrés García
|
||||||
|
|
||||||
|
[Vibe](https://github.com/ttytm/vibe) HTTP requests through libcurl in V
|
||||||
|
|
||||||
|
[Visual Basic](https://sourceforge.net/projects/libcurl-vb/) libcurl-vb by Jeffrey Phillips
|
||||||
|
|
||||||
|
[Visual Foxpro](https://web.archive.org/web/20130730181523/www.ctl32.com.ar/libcurl.asp) by Carlos Alloatti
|
||||||
|
|
||||||
|
[wxWidgets](https://wxcode.sourceforge.net/components/wxcurl/) Written by Casey O'Donnell
|
||||||
|
|
||||||
|
[XBLite](https://web.archive.org/web/20060426150418/perso.wanadoo.fr/xblite/libraries.html) Written by David Szafranski
|
||||||
|
|
||||||
|
[Xojo](https://github.com/charonn0/RB-libcURL) Written by Andrew Lambert
|
177
src/dependencies/curl-8.8.0/docs/BUFQ.md
Normal file
177
src/dependencies/curl-8.8.0/docs/BUFQ.md
Normal file
|
@ -0,0 +1,177 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# bufq
|
||||||
|
|
||||||
|
This is an internal module for managing I/O buffers. A `bufq` can be written
|
||||||
|
to and read from. It manages read and write positions and has a maximum size.
|
||||||
|
|
||||||
|
## read/write
|
||||||
|
|
||||||
|
Its basic read/write functions have a similar signature and return code handling
|
||||||
|
as many internal Curl read and write ones.
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
ssize_t Curl_bufq_write(struct bufq *q, const unsigned char *buf, size_t len, CURLcode *err);
|
||||||
|
|
||||||
|
- returns the length written into `q` or -1 on error.
|
||||||
|
- writing to a full `q` returns -1 and sets *err to CURLE_AGAIN
|
||||||
|
|
||||||
|
ssize_t Curl_bufq_read(struct bufq *q, unsigned char *buf, size_t len, CURLcode *err);
|
||||||
|
|
||||||
|
- returns the length read from `q` or -1 on error.
|
||||||
|
- reading from an empty `q` returns -1 and sets *err to CURLE_AGAIN
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
To pass data into a `bufq` without an extra copy, read callbacks can be used.
|
||||||
|
|
||||||
|
```
|
||||||
|
typedef ssize_t Curl_bufq_reader(void *reader_ctx, unsigned char *buf, size_t len,
|
||||||
|
CURLcode *err);
|
||||||
|
|
||||||
|
ssize_t Curl_bufq_slurp(struct bufq *q, Curl_bufq_reader *reader, void *reader_ctx,
|
||||||
|
CURLcode *err);
|
||||||
|
```
|
||||||
|
|
||||||
|
`Curl_bufq_slurp()` invokes the given `reader` callback, passing it its own
|
||||||
|
internal buffer memory to write to. It may invoke the `reader` several times,
|
||||||
|
as long as it has space and while the `reader` always returns the length that
|
||||||
|
was requested. There are variations of `slurp` that call the `reader` at most
|
||||||
|
once or only read in a maximum amount of bytes.
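
As a sketch of what such a `reader` can look like, the callback below hands
over bytes from a hypothetical application-side buffer. Only the callback
signature comes from above; the context struct and its fields are invented for
the example.

```c
#include <string.h>

struct my_src {
  const unsigned char *data;  /* application-side bytes to feed in */
  size_t len;                 /* bytes still left to hand over */
};

/* Matches the Curl_bufq_reader signature quoted above. */
static ssize_t my_reader(void *reader_ctx, unsigned char *buf, size_t len,
                         CURLcode *err)
{
  struct my_src *src = reader_ctx;
  size_t n = (src->len < len) ? src->len : len;
  if(!n) {
    *err = CURLE_AGAIN;   /* nothing to feed right now */
    return -1;
  }
  memcpy(buf, src->data, n);
  src->data += n;
  src->len -= n;
  *err = CURLE_OK;
  return (ssize_t)n;
}

/* Typical use: Curl_bufq_slurp(&q, my_reader, &src, &err); */
```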
|
||||||
|
|
||||||
|
The analogous mechanism for writing out buffer data is:
|
||||||
|
|
||||||
|
```
|
||||||
|
typedef ssize_t Curl_bufq_writer(void *writer_ctx, const unsigned char *buf, size_t len,
|
||||||
|
CURLcode *err);
|
||||||
|
|
||||||
|
ssize_t Curl_bufq_pass(struct bufq *q, Curl_bufq_writer *writer, void *writer_ctx,
|
||||||
|
CURLcode *err);
|
||||||
|
```
|
||||||
|
|
||||||
|
`Curl_bufq_pass()` invokes the `writer`, passing its internal memory and
|
||||||
|
removes the amount that `writer` reports.
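
Putting the plain read/write calls together, a minimal usage sketch could look
like the following. It relies only on the signatures quoted above; the function
and variable names are invented, and a real caller would sit inside a transfer.

```c
static CURLcode move_bytes(struct bufq *q)
{
  static const unsigned char data[] = "hello";
  unsigned char out[64];
  CURLcode err = CURLE_OK;
  ssize_t n;

  n = Curl_bufq_write(q, data, sizeof(data), &err);
  if(n < 0)
    return (err == CURLE_AGAIN) ? CURLE_OK : err;  /* full: try again later */

  n = Curl_bufq_read(q, out, sizeof(out), &err);
  if(n < 0)
    return (err == CURLE_AGAIN) ? CURLE_OK : err;  /* empty: nothing buffered */

  return CURLE_OK;
}
```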
|
||||||
|
|
||||||
|
## peek and skip
|
||||||
|
|
||||||
|
It is possible to get access to the memory of data stored in a `bufq` with:
|
||||||
|
|
||||||
|
```
|
||||||
|
bool Curl_bufq_peek(const struct bufq *q, const unsigned char **pbuf, size_t *plen);
|
||||||
|
```
|
||||||
|
|
||||||
|
On returning TRUE, `pbuf` points to internal memory with `plen` bytes that one
|
||||||
|
may read. This is only valid until another operation on `bufq` is performed.
|
||||||
|
|
||||||
|
Instead of reading `bufq` data, one may simply skip it:
|
||||||
|
|
||||||
|
```
|
||||||
|
void Curl_bufq_skip(struct bufq *q, size_t amount);
|
||||||
|
```
|
||||||
|
|
||||||
|
This removes `amount` number of bytes from the `bufq`.
|
||||||
|
|
||||||
|
|
||||||
|
## lifetime
|
||||||
|
|
||||||
|
`bufq` is initialized and freed similar to the `dynbuf` module. Code using
|
||||||
|
`bufq` holds a `struct bufq` somewhere. Before it uses it, it invokes:
|
||||||
|
|
||||||
|
```
|
||||||
|
void Curl_bufq_init(struct bufq *q, size_t chunk_size, size_t max_chunks);
|
||||||
|
```
|
||||||
|
|
||||||
|
The `bufq` is told how many "chunks" of data it shall hold at maximum and how
|
||||||
|
large those "chunks" should be. There are some variants of this, allowing for
|
||||||
|
more options. How "chunks" are handled in a `bufq` is presented in the section
|
||||||
|
about memory management.
|
||||||
|
|
||||||
|
The user of the `bufq` has the responsibility to call:
|
||||||
|
|
||||||
|
```
|
||||||
|
void Curl_bufq_free(struct bufq *q);
|
||||||
|
```
|
||||||
|
to free all resources held by `q`. It is possible to reset a `bufq` to empty via:
|
||||||
|
|
||||||
|
```
|
||||||
|
void Curl_bufq_reset(struct bufq *q);
|
||||||
|
```
|
||||||
|
|
||||||
|
## memory management
|
||||||
|
|
||||||
|
Internally, a `bufq` uses allocation of fixed size, e.g. the "chunk_size", up
|
||||||
|
to a maximum number, e.g. "max_chunks". These chunks are allocated on demand,
|
||||||
|
therefore writing to a `bufq` may return `CURLE_OUT_OF_MEMORY`. Once the max
|
||||||
|
number of chunks are used, the `bufq` reports that it is "full".
|
||||||
|
|
||||||
|
Each chunk has a `read` and a `write` index. A `bufq` keeps its chunks in a
|
||||||
|
list. Reading always happens at the head chunk, writing always goes to the
|
||||||
|
tail chunk. When the head chunk becomes empty, it is removed. When the tail
|
||||||
|
chunk becomes full, another chunk is added to the end of the list, becoming
|
||||||
|
the new tail.
|
||||||
|
|
||||||
|
Chunks that are no longer used are returned to a `spare` list by default. If
|
||||||
|
the `bufq` is created with option `BUFQ_OPT_NO_SPARES` those chunks are freed
|
||||||
|
right away.
|
||||||
|
|
||||||
|
If a `bufq` is created with a `bufc_pool`, the no longer used chunks are
|
||||||
|
returned to the pool. Also `bufq` asks the pool for a chunk when it needs one.
|
||||||
|
More in section "pools".
|
||||||
|
|
||||||
|
## empty, full and overflow
|
||||||
|
|
||||||
|
One can ask about the state of a `bufq` with methods such as
|
||||||
|
`Curl_bufq_is_empty(q)`, `Curl_bufq_is_full(q)`, etc. The amount of data held
|
||||||
|
by a `bufq` is the sum of the data in all its chunks. This is what is reported
|
||||||
|
by `Curl_bufq_len(q)`.
|
||||||
|
|
||||||
|
Note that a `bufq` length and it being "full" are only loosely related. A
|
||||||
|
simple example:
|
||||||
|
|
||||||
|
* create a `bufq` with chunk_size=1000 and max_chunks=4.
|
||||||
|
* write 4000 bytes to it, it reports "full"
|
||||||
|
* read 1 byte from it, it still reports "full"
|
||||||
|
* read 999 more bytes from it, and it is no longer "full"
|
||||||
|
|
||||||
|
The reason for this is that full really means: *bufq uses max_chunks and the
|
||||||
|
last one cannot be written to*.
|
||||||
|
|
||||||
|
When you read 1 byte from the head chunk in the example above, the head still
|
||||||
|
holds 999 unread bytes. Only when those are also read can the head chunk be
|
||||||
|
removed and a new tail be added.
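
Spelled out as code, the example looks roughly like this (a sketch only, using
the init, read/write and query calls named in this document):

```c
struct bufq q;
CURLcode err;
unsigned char data[4000] = {0};
unsigned char byte;

Curl_bufq_init(&q, 1000, 4);            /* chunk_size=1000, max_chunks=4 */
Curl_bufq_write(&q, data, 4000, &err);  /* Curl_bufq_is_full(&q): yes, len 4000 */
Curl_bufq_read(&q, &byte, 1, &err);     /* still "full", len 3999 */
Curl_bufq_read(&q, data, 999, &err);    /* head chunk drained: not "full" */
Curl_bufq_free(&q);
```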
|
||||||
|
|
||||||
|
There is another variation to this. If you initialized a `bufq` with option
|
||||||
|
`BUFQ_OPT_SOFT_LIMIT`, it allows writes **beyond** the `max_chunks`. It
|
||||||
|
reports **full**, but one can **still** write. This option is necessary if
|
||||||
|
partial writes need to be avoided. It means that you need other checks to keep
|
||||||
|
the `bufq` from growing ever larger and larger.
|
||||||
|
|
||||||
|
|
||||||
|
## pools
|
||||||
|
|
||||||
|
A `struct bufc_pool` may be used to create chunks for a `bufq` and keep spare
|
||||||
|
ones around. It is initialized and used via:
|
||||||
|
|
||||||
|
```
|
||||||
|
void Curl_bufcp_init(struct bufc_pool *pool, size_t chunk_size, size_t spare_max);
|
||||||
|
|
||||||
|
void Curl_bufq_initp(struct bufq *q, struct bufc_pool *pool, size_t max_chunks, int opts);
|
||||||
|
```
|
||||||
|
|
||||||
|
The pool gets the chunk size and the amount of spares to keep. The `bufq` gets the
|
||||||
|
pool and the `max_chunks`. It no longer needs to know the chunk sizes, as
|
||||||
|
those are managed by the pool.
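
A small sketch of these calls, with one pool feeding two queues. The teardown
call `Curl_bufcp_free()` is assumed here as the counterpart of
`Curl_bufcp_init()`; everything else is quoted from above.

```c
struct bufc_pool pool;
struct bufq q1, q2;

Curl_bufcp_init(&pool, 1000, 8);              /* chunk_size=1000, up to 8 spares */
Curl_bufq_initp(&q1, &pool, 4, 0);            /* max_chunks=4, no options */
Curl_bufq_initp(&q2, &pool, 16, BUFQ_OPT_SOFT_LIMIT);
/* ... both queues draw chunks from, and return them to, the shared pool ... */
Curl_bufq_free(&q1);
Curl_bufq_free(&q2);
Curl_bufcp_free(&pool);                       /* assumed counterpart to the init */
```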
|
||||||
|
|
||||||
|
A pool can be shared between many `bufq`s, as long as all of them operate in
|
||||||
|
the same thread. In curl that would be true for all transfers using the same
|
||||||
|
multi handle. The advantages of a pool are:
|
||||||
|
|
||||||
|
* when all `bufq`s are empty, only memory for `max_spare` chunks in the pool
|
||||||
|
is used. Empty `bufq`s hold no memory.
|
||||||
|
* the latest spare chunk is the first to be handed out again, no matter which
|
||||||
|
`bufq` needs it. This keeps the footprint of "recently used" memory smaller.
|
86
src/dependencies/curl-8.8.0/docs/BUFREF.md
Normal file
86
src/dependencies/curl-8.8.0/docs/BUFREF.md
Normal file
|
@ -0,0 +1,86 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# bufref
|
||||||
|
|
||||||
|
This is an internal module for handling buffer references. A referenced
|
||||||
|
buffer is associated with its destructor function that is implicitly called
|
||||||
|
when the reference is invalidated. Once referenced, a buffer cannot be
|
||||||
|
reallocated.
|
||||||
|
|
||||||
|
A data length is stored within the reference for binary data handling
|
||||||
|
purposes; it is not used by the bufref API.
|
||||||
|
|
||||||
|
The `struct bufref` is used to hold data referencing a buffer. The members of
|
||||||
|
that structure **MUST NOT** be accessed or modified without using the dedicated
|
||||||
|
bufref API.
|
||||||
|
|
||||||
|
## `init`
|
||||||
|
|
||||||
|
```c
|
||||||
|
void Curl_bufref_init(struct bufref *br);
|
||||||
|
```
|
||||||
|
|
||||||
|
Initializes a `bufref` structure. This function **MUST** be called before any
|
||||||
|
other operation is performed on the structure.
|
||||||
|
|
||||||
|
Upon completion, the referenced buffer is `NULL` and length is zero.
|
||||||
|
|
||||||
|
This function may also be called to bypass referenced buffer destruction while
|
||||||
|
invalidating the current reference.
|
||||||
|
|
||||||
|
## `free`
|
||||||
|
|
||||||
|
```c
|
||||||
|
void Curl_bufref_free(struct bufref *br);
|
||||||
|
```
|
||||||
|
|
||||||
|
Destroys the previously referenced buffer using its destructor and
|
||||||
|
reinitializes the structure for a possible subsequent reuse.
|
||||||
|
|
||||||
|
## `set`
|
||||||
|
|
||||||
|
```c
|
||||||
|
void Curl_bufref_set(struct bufref *br, const void *buffer, size_t length,
|
||||||
|
void (*destructor)(void *));
|
||||||
|
```
|
||||||
|
|
||||||
|
Releases the previously referenced buffer, then assigns the new `buffer` to
|
||||||
|
the structure, associated with its `destructor` function. The latter can be
|
||||||
|
specified as `NULL`: this is the case when the referenced buffer is static.
|
||||||
|
|
||||||
|
If `buffer` is NULL, `length` must be zero.
|
||||||
|
|
||||||
|
## `memdup`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_bufref_memdup(struct bufref *br, const void *data, size_t length);
|
||||||
|
```
|
||||||
|
|
||||||
|
Releases the previously referenced buffer, then duplicates the `length`-byte
|
||||||
|
`data` into a buffer allocated via `malloc()` and references the latter
|
||||||
|
associated with destructor `curl_free()`.
|
||||||
|
|
||||||
|
An additional trailing byte is allocated and set to zero as a possible string
|
||||||
|
null-terminator; it is not counted in the stored length.
|
||||||
|
|
||||||
|
Returns `CURLE_OK` if successful, else `CURLE_OUT_OF_MEMORY`.
|
||||||
|
|
||||||
|
## `ptr`
|
||||||
|
|
||||||
|
```c
|
||||||
|
const unsigned char *Curl_bufref_ptr(const struct bufref *br);
|
||||||
|
```
|
||||||
|
|
||||||
|
Returns a `const unsigned char *` to the referenced buffer.
|
||||||
|
|
||||||
|
## `len`
|
||||||
|
|
||||||
|
```c
|
||||||
|
size_t Curl_bufref_len(const struct bufref *br);
|
||||||
|
```
|
||||||
|
|
||||||
|
Returns the stored length of the referenced buffer.
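
To tie the calls above together, here is a small usage sketch. It uses only
the functions documented in this file and shortens the error handling.

```c
struct bufref br;
CURLcode result;

Curl_bufref_init(&br);                        /* NULL buffer, zero length */

result = Curl_bufref_memdup(&br, "data", 4);  /* copies 4 bytes plus a null byte */
if(!result) {
  const unsigned char *p = Curl_bufref_ptr(&br);
  size_t n = Curl_bufref_len(&br);            /* n == 4 */
  (void)p;
  (void)n;
}

Curl_bufref_free(&br);                        /* runs the destructor, reinitializes */
```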
|
94
src/dependencies/curl-8.8.0/docs/BUG-BOUNTY.md
Normal file
94
src/dependencies/curl-8.8.0/docs/BUG-BOUNTY.md
Normal file
|
@ -0,0 +1,94 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# The curl bug bounty
|
||||||
|
|
||||||
|
The curl project runs a bug bounty program in association with
|
||||||
|
[HackerOne](https://www.hackerone.com) and the [Internet Bug
|
||||||
|
Bounty](https://internetbugbounty.org).
|
||||||
|
|
||||||
|
## How does it work?
|
||||||
|
|
||||||
|
Start out by posting your suspected security vulnerability directly to [curl's
|
||||||
|
HackerOne program](https://hackerone.com/curl).
|
||||||
|
|
||||||
|
After you have reported a security issue, it has been deemed credible, and a
|
||||||
|
patch and advisory has been made public, you may be eligible for a bounty from
|
||||||
|
this program. See the [Security Process](https://curl.se/dev/secprocess.html)
|
||||||
|
document for how we work with security issues.
|
||||||
|
|
||||||
|
## What are the reward amounts?
|
||||||
|
|
||||||
|
The curl project offers monetary compensation for reported and published
|
||||||
|
security vulnerabilities. The amount of money that is rewarded depends on how
|
||||||
|
serious the flaw is determined to be.
|
||||||
|
|
||||||
|
Since 2021, the Bug Bounty is managed in association with the Internet Bug
|
||||||
|
Bounty and they set the reward amounts. If it would turn out that they set
|
||||||
|
amounts that are way lower than we can accept, the curl project intends to
|
||||||
|
"top up" rewards.
|
||||||
|
|
||||||
|
In 2022, typical "Medium" rated vulnerabilities have been rewarded 2,400 USD
|
||||||
|
each.
|
||||||
|
|
||||||
|
## Who is eligible for a reward?
|
||||||
|
|
||||||
|
Everyone and anyone who reports a security problem in a released curl version
|
||||||
|
that has not already been reported can ask for a bounty.
|
||||||
|
|
||||||
|
Dedicated - paid for - security audits that are performed in collaboration
|
||||||
|
with curl developers are not eligible for bounties.
|
||||||
|
|
||||||
|
Vulnerabilities in features that are off by default and documented as
|
||||||
|
experimental are not eligible for a reward.
|
||||||
|
|
||||||
|
The vulnerability has to be fixed and publicly announced (by the curl project)
|
||||||
|
before a bug bounty is considered.
|
||||||
|
|
||||||
|
Once the vulnerability has been published by curl, the researcher can request
|
||||||
|
their bounty from the [Internet Bug Bounty](https://hackerone.com/ibb).
|
||||||
|
|
||||||
|
Bounties need to be requested within twelve months from the publication of the
|
||||||
|
vulnerability.
|
||||||
|
|
||||||
|
The curl security team reserves the right to deny or allow bug
|
||||||
|
bounty payouts at its own discretion. There is no appeals process.
|
||||||
|
|
||||||
|
## Product vulnerabilities only
|
||||||
|
|
||||||
|
This bug bounty only concerns the curl and libcurl products and thus their
|
||||||
|
respective source codes - when running on existing hardware. It does not
|
||||||
|
include curl documentation, curl websites, or other curl related
|
||||||
|
infrastructure.
|
||||||
|
|
||||||
|
The curl security team is the sole arbiter if a reported flaw is subject to a
|
||||||
|
bounty or not.
|
||||||
|
|
||||||
|
## Third parties
|
||||||
|
|
||||||
|
The curl bug bounty does not cover flaws in third party dependencies
|
||||||
|
(libraries) used by curl or libcurl. If the bug triggers because of curl
|
||||||
|
behaving wrongly or abusing a third party dependency, the problem is rather in
|
||||||
|
curl and not in the dependency and then the bounty might cover the problem.
|
||||||
|
|
||||||
|
## How are vulnerabilities graded?
|
||||||
|
|
||||||
|
The grading of each reported vulnerability that makes a reward claim is
|
||||||
|
performed by the curl security team. The grading is based on the CVSS (Common
|
||||||
|
Vulnerability Scoring System) 3.0.
|
||||||
|
|
||||||
|
## How are reward amounts determined?
|
||||||
|
|
||||||
|
The curl security team gives the vulnerability a score or severity level, as
|
||||||
|
mentioned above. The actual monetary reward amount is decided and paid by the
|
||||||
|
Internet Bug Bounty.
|
||||||
|
|
||||||
|
## Regarding taxes, etc. on the bounties
|
||||||
|
|
||||||
|
In the event that the individual receiving a bug bounty needs to pay taxes on
|
||||||
|
the reward money, the responsibility lies with the receiver. The curl project
|
||||||
|
or its security team never actually receive any of this money, hold the money,
|
||||||
|
or pay out the money.
|
270
src/dependencies/curl-8.8.0/docs/BUGS.md
Normal file
270
src/dependencies/curl-8.8.0/docs/BUGS.md
Normal file
|
@ -0,0 +1,270 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# BUGS
|
||||||
|
|
||||||
|
## There are still bugs
|
||||||
|
|
||||||
|
Curl and libcurl keep being developed. Adding features and changing code
|
||||||
|
means that bugs sneak in, no matter how hard we try to keep them out.
|
||||||
|
|
||||||
|
Of course there are lots of bugs left. Not to mention misfeatures.
|
||||||
|
|
||||||
|
To help us make curl the stable and solid product we want it to be, we need
|
||||||
|
bug reports and bug fixes.
|
||||||
|
|
||||||
|
## Where to report
|
||||||
|
|
||||||
|
If you cannot fix a bug yourself and submit a fix for it, try to write as
|
||||||
|
detailed a report as possible to a curl mailing list to allow one of us to have
|
||||||
|
a go at a solution. You can optionally also submit your problem in [curl's
|
||||||
|
bug tracking system](https://github.com/curl/curl/issues).
|
||||||
|
|
||||||
|
Please read the rest of this document below first before doing that.
|
||||||
|
|
||||||
|
If you feel you need to ask around first, find a suitable [mailing list](
|
||||||
|
https://curl.se/mail/) and post your questions there.
|
||||||
|
|
||||||
|
## Security bugs
|
||||||
|
|
||||||
|
If you find a bug or problem in curl or libcurl that you think has a security
|
||||||
|
impact, for example a bug that can put users in danger or make them
|
||||||
|
vulnerable if the bug becomes public knowledge, then please report that bug
|
||||||
|
using our security development process.
|
||||||
|
|
||||||
|
Security related bugs or bugs that are suspected to have a security impact,
|
||||||
|
should be reported on the [curl security tracker at
|
||||||
|
HackerOne](https://hackerone.com/curl).
|
||||||
|
|
||||||
|
This ensures that the report reaches the curl security team so that they
|
||||||
|
first can deal with the report away from the public to minimize the harm and
|
||||||
|
impact it has on existing users out there who might be using the vulnerable
|
||||||
|
versions.
|
||||||
|
|
||||||
|
The curl project's process for handling security related issues is
|
||||||
|
[documented separately](https://curl.se/dev/secprocess.html).
|
||||||
|
|
||||||
|
## What to report
|
||||||
|
|
||||||
|
When reporting a bug, you should include all information to help us
|
||||||
|
understand what is wrong, what you expected to happen and how to repeat the
|
||||||
|
bad behavior. You therefore need to tell us:
|
||||||
|
|
||||||
|
- your operating system's name and version number
|
||||||
|
|
||||||
|
- what version of curl you are using (`curl -V` is fine)
|
||||||
|
|
||||||
|
- versions of the used libraries that libcurl is built to use
|
||||||
|
|
||||||
|
- what URL you were working with (if possible), at least which protocol
|
||||||
|
|
||||||
|
and anything and everything else you think matters. Tell us what you expected
|
||||||
|
to happen, tell us what did happen, tell us how you could make it work
|
||||||
|
another way. Dig around, try out, test. Then include all the tiny bits and
|
||||||
|
pieces in your report. You benefit from this yourself, as it enables us to
|
||||||
|
help you quicker and more accurately.
|
||||||
|
|
||||||
|
Since curl deals with networks, it often helps us if you include a protocol
|
||||||
|
debug dump with your bug report. The output you get by using the `-v` or
|
||||||
|
`--trace` options.
|
||||||
|
|
||||||
|
If curl crashed, causing a core dump (in Unix), there is hardly any use to
|
||||||
|
send that huge file to any of us. Unless we have the same system setup as
|
||||||
|
you, we cannot do much with it. Instead, we ask you to get a stack trace and
|
||||||
|
send that (much smaller) output to us instead.
|
||||||
|
|
||||||
|
The address and how to subscribe to the mailing lists are detailed in the
|
||||||
|
`MANUAL.md` file.
|
||||||
|
|
||||||
|
## libcurl problems
|
||||||
|
|
||||||
|
When you have written your own application with libcurl to perform transfers,
|
||||||
|
it is even more important to be specific and detailed when reporting bugs.
|
||||||
|
|
||||||
|
Tell us the libcurl version and your operating system. Tell us the name and
|
||||||
|
version of all relevant sub-components like for example the SSL library
|
||||||
|
you are using and what name resolving your libcurl uses. If you use SFTP or
|
||||||
|
SCP, the libssh2 version is relevant etc.
|
||||||
|
|
||||||
|
Showing us a real source code example repeating your problem is the best way
|
||||||
|
to get our attention and it greatly increases our chances to understand your
|
||||||
|
problem and to work on a fix (if we agree it truly is a problem).
|
||||||
|
|
||||||
|
Lots of problems that appear to be libcurl problems are actually just abuses
|
||||||
|
of the libcurl API or other malfunctions in your applications. It is advised
|
||||||
|
that you run your problematic program using a memory debug tool like valgrind
|
||||||
|
or similar before you post memory-related or "crashing" problems to us.
|
||||||
|
|
||||||
|
## Who fixes the problems
|
||||||
|
|
||||||
|
If the problems or bugs you describe are considered to be bugs, we want to
|
||||||
|
have the problems fixed.
|
||||||
|
|
||||||
|
There are no developers in the curl project that are paid to work on bugs.
|
||||||
|
All developers that take on reported bugs do this on a voluntary basis. We do
|
||||||
|
it out of an ambition to keep curl and libcurl excellent products and out of
|
||||||
|
pride.
|
||||||
|
|
||||||
|
Please do not assume that you can just lump over something to us and it then
|
||||||
|
magically gets fixed after some given time. Most often we need feedback and
|
||||||
|
help to understand what you have experienced and how to repeat a problem.
|
||||||
|
Then we may only be able to assist YOU to debug the problem and to track down
|
||||||
|
the proper fix.
|
||||||
|
|
||||||
|
We get reports from many people every month and each report can take a
|
||||||
|
considerable amount of time to really get to the bottom of.
|
||||||
|
|
||||||
|
## How to get a stack trace
|
||||||
|
|
||||||
|
First, you must make sure that you compile all sources with `-g` and that you
|
||||||
|
do not 'strip' the final executable. Try to avoid optimizing the code as well,
|
||||||
|
remove `-O`, `-O2` etc from the compiler options.
|
||||||
|
|
||||||
|
Run the program until it cores.
|
||||||
|
|
||||||
|
Run your debugger on the core file, like `<debugger> curl core`. `<debugger>`
|
||||||
|
should be replaced with the name of your debugger, in most cases that is
|
||||||
|
`gdb`, but `dbx` and others also occur.
|
||||||
|
|
||||||
|
When the debugger has finished loading the core file and presents you a
|
||||||
|
prompt, enter `where` (without quotes) and press return.
|
||||||
|
|
||||||
|
The list that is presented is the stack trace. If everything worked, it is
|
||||||
|
supposed to contain the chain of functions that were called when curl
|
||||||
|
crashed. Include the stack trace with your detailed bug report, it helps a
|
||||||
|
lot.
|
||||||
|
|
||||||
|
## Bugs in libcurl bindings
|
||||||
|
|
||||||
|
There are of course bugs in libcurl bindings. You should then primarily
|
||||||
|
approach the team that works on that particular binding and see what you can
|
||||||
|
do to help them fix the problem.
|
||||||
|
|
||||||
|
If you suspect that the problem exists in the underlying libcurl, then please
|
||||||
|
convert your program over to plain C and follow the steps outlined above.
|
||||||
|
|
||||||
|
## Bugs in old versions
|
||||||
|
|
||||||
|
The curl project typically releases new versions every other month, and we
|
||||||
|
fix several hundred bugs per year. For a huge table of releases, number of
|
||||||
|
bug fixes and more, see: https://curl.se/docs/releases.html
|
||||||
|
|
||||||
|
The developers in the curl project do not have bandwidth or energy enough to
|
||||||
|
maintain several branches or to spend much time on hunting down problems in
|
||||||
|
old versions when chances are we already fixed them or at least that they have
|
||||||
|
changed nature and appearance in later versions.
|
||||||
|
|
||||||
|
When you experience a problem and want to report it, you really SHOULD
|
||||||
|
include the version number of the curl you are using when you experience the
|
||||||
|
issue. If that version number shows us that you are using an out-of-date curl,
|
||||||
|
you should also try out a modern curl version to see if the problem persists
|
||||||
|
or how/if it has changed in appearance.
|
||||||
|
|
||||||
|
Even if you cannot immediately upgrade your application/system to run the
|
||||||
|
latest curl version, you can most often at least run a test version or
|
||||||
|
experimental build or similar, to get this confirmed or not.
|
||||||
|
|
||||||
|
At times people insist that they cannot upgrade to a modern curl version, but
|
||||||
|
instead, they "just want the bug fixed". That is fine, just do not count on us
|
||||||
|
spending many cycles on trying to identify which single commit, if that is
|
||||||
|
even possible, that at some point in the past fixed the problem you are now
|
||||||
|
experiencing.
|
||||||
|
|
||||||
|
Security wise, it is almost always a bad idea to lag behind the current curl
|
||||||
|
versions by a lot. We keep discovering and reporting security problems
|
||||||
|
over time, as you can see in [this
|
||||||
|
table](https://curl.se/docs/vulnerabilities.html)
|
||||||
|
|
||||||
|
# Bug fixing procedure
|
||||||
|
|
||||||
|
## What happens on first filing
|
||||||
|
|
||||||
|
When a new issue is posted in the issue tracker or on the mailing list, the
|
||||||
|
team of developers first needs to see the report. Maybe they took the day off,
|
||||||
|
maybe they are off in the woods hunting. Have patience. Allow at least a few
|
||||||
|
days before expecting someone to have responded.
|
||||||
|
|
||||||
|
In the issue tracker, you can expect that some labels are set on the issue to
|
||||||
|
help categorize it.
|
||||||
|
|
||||||
|
## First response
|
||||||
|
|
||||||
|
If your issue/bug report was not perfect at once (and few are), chances are
|
||||||
|
that someone asks follow-up questions. Which version did you use? Which
|
||||||
|
options did you use? How often does the problem occur? How can we reproduce
|
||||||
|
this problem? Which protocols does it involve? Or perhaps much more specific
|
||||||
|
and deep diving questions. It all depends on your specific issue.
|
||||||
|
|
||||||
|
You should then respond to these follow-up questions and provide more info
|
||||||
|
about the problem, so that we can help you figure it out. Or maybe you can
|
||||||
|
help us figure it out. An active back-and-forth communication is important
|
||||||
|
and the key for finding a cure and landing a fix.
|
||||||
|
|
||||||
|
## Not reproducible
|
||||||
|
|
||||||
|
We may require further work from you who actually see or experience the
|
||||||
|
problem if we cannot reproduce it and cannot understand it even after having
|
||||||
|
gotten all the info we need and having studied the source code over again.
|
||||||
|
|
||||||
|
## Unresponsive
|
||||||
|
|
||||||
|
If the problem has not been understood or reproduced, and there is nobody
|
||||||
|
responding to follow-up questions or questions asking for clarifications or
|
||||||
|
for discussing possible ways to move forward with the task, we take that as a
|
||||||
|
strong suggestion that the bug is unimportant.
|
||||||
|
|
||||||
|
Unimportant issues are closed as inactive sooner or later as they cannot be
|
||||||
|
fixed. The inactivity period (waiting for responses) should not be shorter
|
||||||
|
than two weeks but may extend to months.
|
||||||
|
|
||||||
|
## Lack of time/interest
|
||||||
|
|
||||||
|
Bugs that are filed and are understood can unfortunately end up in the
|
||||||
|
"nobody cares enough about it to work on it" category. Such bugs are
|
||||||
|
perfectly valid problems that *should* get fixed but apparently are not. We
|
||||||
|
try to mark such bugs as `KNOWN_BUGS material` after a time of inactivity and
|
||||||
|
if no activity is noticed after yet some time those bugs are added to the
|
||||||
|
`KNOWN_BUGS` document and are closed in the issue tracker.
|
||||||
|
|
||||||
|
## `KNOWN_BUGS`
|
||||||
|
|
||||||
|
This is a list of known bugs. Bugs we know exist and that have been pointed
|
||||||
|
out but that have not yet been fixed. The reasons for why they have not been
|
||||||
|
fixed can involve anything really, but the primary reason is that nobody has
|
||||||
|
considered these problems to be important enough to spend the necessary time
|
||||||
|
and effort to have them fixed.
|
||||||
|
|
||||||
|
The `KNOWN_BUGS` items are always up for grabs and we love the ones who bring
|
||||||
|
one of them back to life and offer solutions to them.
|
||||||
|
|
||||||
|
The `KNOWN_BUGS` document has a sibling document known as `TODO`.
|
||||||
|
|
||||||
|
## `TODO`
|
||||||
|
|
||||||
|
Issues that are filed or reported that are not really bugs but more missing
|
||||||
|
features or ideas for future improvements and so on are marked as
|
||||||
|
*enhancement* or *feature-request* and get added to the `TODO` document and
|
||||||
|
the issues are closed. We do not keep TODO items open in the issue tracker.
|
||||||
|
|
||||||
|
The `TODO` document is full of ideas and suggestions of what we can add or
|
||||||
|
fix one day. You are always encouraged and free to grab one of those items and
|
||||||
|
take up a discussion with the curl development team on how that could be
|
||||||
|
implemented or provided in the project so that you can work on ticking it off
|
||||||
|
that document.
|
||||||
|
|
||||||
|
If an issue is rather a bug and not a missing feature or functionality, it is
|
||||||
|
listed in `KNOWN_BUGS` instead.
|
||||||
|
|
||||||
|
## Closing off stalled bugs
|
||||||
|
|
||||||
|
The [issue and pull request trackers](https://github.com/curl/curl) only hold
|
||||||
|
"active" entries open (using a non-precise definition of what active actually
|
||||||
|
is, but they are at least not completely dead). Those that are abandoned or
|
||||||
|
in other ways dormant are closed and sometimes added to `TODO` and
|
||||||
|
`KNOWN_BUGS` instead.
|
||||||
|
|
||||||
|
This way, we only have "active" issues open on GitHub. Irrelevant issues and
|
||||||
|
pull requests do not distract developers or casual visitors.
|
190
src/dependencies/curl-8.8.0/docs/CHECKSRC.md
Normal file
190
src/dependencies/curl-8.8.0/docs/CHECKSRC.md
Normal file
|
@ -0,0 +1,190 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# checksrc
|
||||||
|
|
||||||
|
This is the tool we use within the curl project to scan C source code and
|
||||||
|
check that it adheres to our [Source Code Style guide](CODE_STYLE.md).
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
checksrc.pl [options] [file1] [file2] ...
|
||||||
|
|
||||||
|
## Command line options
|
||||||
|
|
||||||
|
`-W[file]` skip that file and exclude it from being checked. Helpful
|
||||||
|
when, for example, one of the files is generated.
|
||||||
|
|
||||||
|
`-D[dir]` directory name to prepend to filenames when accessing them.
|
||||||
|
|
||||||
|
`-h` shows the help output, that also lists all recognized warnings
|
||||||
|
|
||||||
|
## What does `checksrc` warn for?
|
||||||
|
|
||||||
|
`checksrc` does not check and verify the code against the entire style guide.
|
||||||
|
The script is an effort to detect the most common mistakes and syntax mistakes
|
||||||
|
that contributors make before they get accustomed to our code style. Heck,
|
||||||
|
many of us regulars make these mistakes too and this script helps us keep the code
|
||||||
|
in shape.
|
||||||
|
|
||||||
|
checksrc.pl -h
|
||||||
|
|
||||||
|
Lists how to use the script and it lists all existing warnings it has and
|
||||||
|
problems it detects. At the time of this writing, the existing `checksrc`
|
||||||
|
warnings are:
|
||||||
|
|
||||||
|
- `ASSIGNWITHINCONDITION`: Assignment within a conditional expression. The
|
||||||
|
code style mandates the assignment to be done outside of it.
|
||||||
|
|
||||||
|
- `ASTERISKNOSPACE`: A pointer was declared like `char* name` instead of the
|
||||||
|
more appropriate `char *name` style. The asterisk should sit next to the
|
||||||
|
name.
|
||||||
|
|
||||||
|
- `ASTERISKSPACE`: A pointer was declared like `char * name` instead of the
|
||||||
|
more appropriate `char *name` style. The asterisk should sit right next to
|
||||||
|
the name without a space in between.
|
||||||
|
|
||||||
|
- `BADCOMMAND`: There is a bad `checksrc` instruction in the code. See the
|
||||||
|
**Ignore certain warnings** section below for details.
|
||||||
|
|
||||||
|
- `BANNEDFUNC`: A banned function was used. The functions sprintf, vsprintf,
|
||||||
|
strcat, strncat, gets are **never** allowed in curl source code.
|
||||||
|
|
||||||
|
- `BRACEELSE`: '} else' on the same line. The else is supposed to be on the
|
||||||
|
following line.
|
||||||
|
|
||||||
|
- `BRACEPOS`: wrong position for an open brace (`{`).
|
||||||
|
|
||||||
|
- `BRACEWHILE`: more than one space between the end brace and the while keyword
|
||||||
|
|
||||||
|
- `COMMANOSPACE`: a comma without following space
|
||||||
|
|
||||||
|
- `COPYRIGHT`: the file is missing a copyright statement
|
||||||
|
|
||||||
|
- `CPPCOMMENTS`: `//` comment detected, that is not C89 compliant
|
||||||
|
|
||||||
|
- `DOBRACE`: only use one space after do before open brace
|
||||||
|
|
||||||
|
- `EMPTYLINEBRACE`: found empty line before open brace
|
||||||
|
|
||||||
|
- `EQUALSNOSPACE`: no space after `=` sign
|
||||||
|
|
||||||
|
- `EQUALSNULL`: comparison with `== NULL` used in if/while. We use `!var`.
|
||||||
|
|
||||||
|
- `EXCLAMATIONSPACE`: space found after an exclamation mark
|
||||||
|
|
||||||
|
- `FOPENMODE`: `fopen()` needs a macro for the mode string, use it
|
||||||
|
|
||||||
|
- `INDENTATION`: detected a wrong start column for code. Note that this
|
||||||
|
warning only checks some specific places and can certainly miss many bad
|
||||||
|
indentations.
|
||||||
|
|
||||||
|
- `LONGLINE`: A line is longer than 79 columns.
|
||||||
|
|
||||||
|
- `MULTISPACE`: Multiple spaces were found where only one should be used.
|
||||||
|
|
||||||
|
- `NOSPACEEQUALS`: An equals sign was found without preceding space. We prefer
|
||||||
|
`a = 2` and *not* `a=2`.
|
||||||
|
|
||||||
|
- `NOTEQUALSZERO`: check found using `!= 0`. We use plain `if(var)`.
|
||||||
|
|
||||||
|
- `ONELINECONDITION`: do not put the conditional block on the same line as `if()`
|
||||||
|
|
||||||
|
- `OPENCOMMENT`: File ended with a comment (`/*`) still "open".
|
||||||
|
|
||||||
|
- `PARENBRACE`: `){` was used without sufficient space in between.
|
||||||
|
|
||||||
|
- `RETURNNOSPACE`: `return` was used without space between the keyword and the
|
||||||
|
following value.
|
||||||
|
|
||||||
|
- `SEMINOSPACE`: There was no space (or newline) following a semicolon.
|
||||||
|
|
||||||
|
- `SIZEOFNOPAREN`: Found use of sizeof without parentheses. We prefer
|
||||||
|
`sizeof(int)` style.
|
||||||
|
|
||||||
|
- `SNPRINTF` - Found use of `snprintf()`. Since we use an internal replacement
|
||||||
|
with a different return code etc, we prefer `msnprintf()`.
|
||||||
|
|
||||||
|
- `SPACEAFTERPAREN`: there was a space after open parenthesis, `( text`.
|
||||||
|
|
||||||
|
- `SPACEBEFORECLOSE`: there was a space before a close parenthesis, `text )`.
|
||||||
|
|
||||||
|
- `SPACEBEFORECOMMA`: there was a space before a comma, `one , two`.
|
||||||
|
|
||||||
|
- `SPACEBEFOREPAREN`: there was a space before an open parenthesis, `if (`,
|
||||||
|
where one was not expected
|
||||||
|
|
||||||
|
- `SPACESEMICOLON`: there was a space before semicolon, ` ;`.
|
||||||
|
|
||||||
|
- `TABS`: TAB characters are not allowed
|
||||||
|
|
||||||
|
- `TRAILINGSPACE`: Trailing whitespace on the line
|
||||||
|
|
||||||
|
- `TYPEDEFSTRUCT`: we frown upon (most) typedefed structs
|
||||||
|
|
||||||
|
- `UNUSEDIGNORE`: a `checksrc` inlined warning ignore was asked for but not
|
||||||
|
used, that is an ignore that should be removed or changed to get used.
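
As an illustration, here is a small, hypothetical C snippet that would trigger a few of the warnings listed above, with the preferred style noted in the comments:

```
#include <stdio.h>
#include <string.h>

int main(void)
{
  char buf[16];
  char* p = buf;            /* ASTERISKNOSPACE: write "char *p" */

  strcpy(buf, "hi");

  if(p == NULL)             /* EQUALSNULL: write "if(!p)" */
    return 1;

  if (strlen(p) != 0)       /* SPACEBEFOREPAREN and NOTEQUALSZERO:
                               write "if(strlen(p))" */
    printf("%s\n", p);

  return 0;
}
```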
|
||||||
|
|
||||||
|
### Extended warnings
|
||||||
|
|
||||||
|
Some warnings are quite computationally expensive to perform, so they are
|
||||||
|
turned off by default. To enable these warnings, place a `.checksrc` file in
|
||||||
|
the directory where they should be activated with commands to enable the
|
||||||
|
warnings you are interested in. The format of the file is to enable one
|
||||||
|
warning per line like so: `enable <EXTENDEDWARNING>`
|
||||||
|
|
||||||
|
Currently these are the extended warnings which can be enabled:
|
||||||
|
|
||||||
|
- `COPYRIGHTYEAR`: the current changeset has not updated the copyright year in
|
||||||
|
the source file
|
||||||
|
|
||||||
|
- `STRERROR`: use of banned function strerror()
|
||||||
|
|
||||||
|
- `STDERR`: use of banned variable `stderr`
|
||||||
|
|
||||||
|
## Ignore certain warnings
|
||||||
|
|
||||||
|
Due to the nature of the source code and the flaws of the `checksrc` tool,
|
||||||
|
there is sometimes a need to ignore specific warnings. `checksrc` allows a few
|
||||||
|
different ways to do this.
|
||||||
|
|
||||||
|
### Inline ignore
|
||||||
|
|
||||||
|
You can control what to ignore within a specific source file by providing
|
||||||
|
instructions to `checksrc` in the source code itself. See examples below. The
|
||||||
|
instruction can ask to ignore a specific warning a specific number of times or
|
||||||
|
you ignore all of them until you mark the end of the ignored section.
|
||||||
|
|
||||||
|
Inline ignores are only done for that single specific source code file.
|
||||||
|
|
||||||
|
Example
|
||||||
|
|
||||||
|
/* !checksrc! disable LONGLINE all */
|
||||||
|
|
||||||
|
This ignores the warning for overly long lines until it is re-enabled with:
|
||||||
|
|
||||||
|
/* !checksrc! enable LONGLINE */
|
||||||
|
|
||||||
|
If the enabling is not performed before the end of the file, it is enabled
|
||||||
|
again automatically for the next file.
|
||||||
|
|
||||||
|
You can also opt to ignore just N violations so that if you have a single long
|
||||||
|
line you just cannot shorten and is agreed to be fine anyway:
|
||||||
|
|
||||||
|
/* !checksrc! disable LONGLINE 1 */
|
||||||
|
|
||||||
|
... and the warning for long lines is enabled again automatically after it has
|
||||||
|
ignored that single warning. The number `1` can of course be changed to any
|
||||||
|
other integer number. It can be used to make sure only the exact intended
|
||||||
|
instances are ignored and nothing extra.
|
||||||
|
|
||||||
|
### Directory wide ignore patterns
|
||||||
|
|
||||||
|
This is a method we have transitioned away from. Use inline ignores as far as
|
||||||
|
possible.
|
||||||
|
|
||||||
|
Make a `checksrc.skip` file in the directory of the source code with the
|
||||||
|
false positive, and include the full offending line into this file.
|
433
src/dependencies/curl-8.8.0/docs/CIPHERS.md
Normal file
433
src/dependencies/curl-8.8.0/docs/CIPHERS.md
Normal file
|
@ -0,0 +1,433 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Ciphers
|
||||||
|
|
||||||
|
With curl's options
|
||||||
|
[`CURLOPT_SSL_CIPHER_LIST`](https://curl.se/libcurl/c/CURLOPT_SSL_CIPHER_LIST.html)
|
||||||
|
and
|
||||||
|
[`--ciphers`](https://curl.se/docs/manpage.html#--ciphers)
|
||||||
|
users can control which ciphers to consider when negotiating TLS connections.
|
||||||
|
|
||||||
|
TLS 1.3 ciphers are supported since curl 7.61 for OpenSSL 1.1.1+, and since
|
||||||
|
curl 7.85 for Schannel with options
|
||||||
|
[`CURLOPT_TLS13_CIPHERS`](https://curl.se/libcurl/c/CURLOPT_TLS13_CIPHERS.html)
|
||||||
|
and
|
||||||
|
[`--tls13-ciphers`](https://curl.se/docs/manpage.html#--tls13-ciphers)
|
||||||
|
. If you are using a different SSL backend you can try setting TLS 1.3 cipher
|
||||||
|
suites by using the respective regular cipher option.
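
In libcurl code, both are plain string options. A minimal sketch (the cipher strings here are just examples taken from the lists below; adjust them to your backend and security needs):

```
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

    /* TLS 1.2 and earlier cipher suites (OpenSSL-style names) */
    curl_easy_setopt(curl, CURLOPT_SSL_CIPHER_LIST,
                     "ECDHE-RSA-AES128-GCM-SHA256:"
                     "ECDHE-RSA-AES256-GCM-SHA384");

    /* TLS 1.3 cipher suites, where the backend supports setting them */
    curl_easy_setopt(curl, CURLOPT_TLS13_CIPHERS,
                     "TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256");

    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}
```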
|
||||||
|
|
||||||
|
The names of the known ciphers differ depending on which TLS backend
|
||||||
|
libcurl was built to use. This is an attempt to list known cipher names.
|
||||||
|
|
||||||
|
## OpenSSL
|
||||||
|
|
||||||
|
(based on [OpenSSL docs](https://www.openssl.org/docs/manmaster/man1/openssl-ciphers.html))
|
||||||
|
|
||||||
|
When specifying multiple cipher names, separate them with colon (`:`).
|
||||||
|
|
||||||
|
### SSL3 cipher suites
|
||||||
|
|
||||||
|
`NULL-MD5`
|
||||||
|
`NULL-SHA`
|
||||||
|
`RC4-MD5`
|
||||||
|
`RC4-SHA`
|
||||||
|
`IDEA-CBC-SHA`
|
||||||
|
`DES-CBC3-SHA`
|
||||||
|
`DH-DSS-DES-CBC3-SHA`
|
||||||
|
`DH-RSA-DES-CBC3-SHA`
|
||||||
|
`DHE-DSS-DES-CBC3-SHA`
|
||||||
|
`DHE-RSA-DES-CBC3-SHA`
|
||||||
|
`ADH-RC4-MD5`
|
||||||
|
`ADH-DES-CBC3-SHA`
|
||||||
|
|
||||||
|
### TLS v1.0 cipher suites
|
||||||
|
|
||||||
|
`NULL-MD5`
|
||||||
|
`NULL-SHA`
|
||||||
|
`RC4-MD5`
|
||||||
|
`RC4-SHA`
|
||||||
|
`IDEA-CBC-SHA`
|
||||||
|
`DES-CBC3-SHA`
|
||||||
|
`DHE-DSS-DES-CBC3-SHA`
|
||||||
|
`DHE-RSA-DES-CBC3-SHA`
|
||||||
|
`ADH-RC4-MD5`
|
||||||
|
`ADH-DES-CBC3-SHA`
|
||||||
|
|
||||||
|
### AES cipher suites from RFC 3268, extending TLS v1.0
|
||||||
|
|
||||||
|
`AES128-SHA`
|
||||||
|
`AES256-SHA`
|
||||||
|
`DH-DSS-AES128-SHA`
|
||||||
|
`DH-DSS-AES256-SHA`
|
||||||
|
`DH-RSA-AES128-SHA`
|
||||||
|
`DH-RSA-AES256-SHA`
|
||||||
|
`DHE-DSS-AES128-SHA`
|
||||||
|
`DHE-DSS-AES256-SHA`
|
||||||
|
`DHE-RSA-AES128-SHA`
|
||||||
|
`DHE-RSA-AES256-SHA`
|
||||||
|
`ADH-AES128-SHA`
|
||||||
|
`ADH-AES256-SHA`
|
||||||
|
|
||||||
|
### SEED cipher suites from RFC 4162, extending TLS v1.0
|
||||||
|
|
||||||
|
`SEED-SHA`
|
||||||
|
`DH-DSS-SEED-SHA`
|
||||||
|
`DH-RSA-SEED-SHA`
|
||||||
|
`DHE-DSS-SEED-SHA`
|
||||||
|
`DHE-RSA-SEED-SHA`
|
||||||
|
`ADH-SEED-SHA`
|
||||||
|
|
||||||
|
### GOST cipher suites, extending TLS v1.0
|
||||||
|
|
||||||
|
`GOST94-GOST89-GOST89`
|
||||||
|
`GOST2001-GOST89-GOST89`
|
||||||
|
`GOST94-NULL-GOST94`
|
||||||
|
`GOST2001-NULL-GOST94`
|
||||||
|
|
||||||
|
### Elliptic curve cipher suites
|
||||||
|
|
||||||
|
`ECDHE-RSA-NULL-SHA`
|
||||||
|
`ECDHE-RSA-RC4-SHA`
|
||||||
|
`ECDHE-RSA-DES-CBC3-SHA`
|
||||||
|
`ECDHE-RSA-AES128-SHA`
|
||||||
|
`ECDHE-RSA-AES256-SHA`
|
||||||
|
`ECDHE-ECDSA-NULL-SHA`
|
||||||
|
`ECDHE-ECDSA-RC4-SHA`
|
||||||
|
`ECDHE-ECDSA-DES-CBC3-SHA`
|
||||||
|
`ECDHE-ECDSA-AES128-SHA`
|
||||||
|
`ECDHE-ECDSA-AES256-SHA`
|
||||||
|
`AECDH-NULL-SHA`
|
||||||
|
`AECDH-RC4-SHA`
|
||||||
|
`AECDH-DES-CBC3-SHA`
|
||||||
|
`AECDH-AES128-SHA`
|
||||||
|
`AECDH-AES256-SHA`
|
||||||
|
|
||||||
|
### TLS v1.2 cipher suites
|
||||||
|
|
||||||
|
`NULL-SHA256`
|
||||||
|
`AES128-SHA256`
|
||||||
|
`AES256-SHA256`
|
||||||
|
`AES128-GCM-SHA256`
|
||||||
|
`AES256-GCM-SHA384`
|
||||||
|
`DH-RSA-AES128-SHA256`
|
||||||
|
`DH-RSA-AES256-SHA256`
|
||||||
|
`DH-RSA-AES128-GCM-SHA256`
|
||||||
|
`DH-RSA-AES256-GCM-SHA384`
|
||||||
|
`DH-DSS-AES128-SHA256`
|
||||||
|
`DH-DSS-AES256-SHA256`
|
||||||
|
`DH-DSS-AES128-GCM-SHA256`
|
||||||
|
`DH-DSS-AES256-GCM-SHA384`
|
||||||
|
`DHE-RSA-AES128-SHA256`
|
||||||
|
`DHE-RSA-AES256-SHA256`
|
||||||
|
`DHE-RSA-AES128-GCM-SHA256`
|
||||||
|
`DHE-RSA-AES256-GCM-SHA384`
|
||||||
|
`DHE-DSS-AES128-SHA256`
|
||||||
|
`DHE-DSS-AES256-SHA256`
|
||||||
|
`DHE-DSS-AES128-GCM-SHA256`
|
||||||
|
`DHE-DSS-AES256-GCM-SHA384`
|
||||||
|
`ECDHE-RSA-AES128-SHA256`
|
||||||
|
`ECDHE-RSA-AES256-SHA384`
|
||||||
|
`ECDHE-RSA-AES128-GCM-SHA256`
|
||||||
|
`ECDHE-RSA-AES256-GCM-SHA384`
|
||||||
|
`ECDHE-ECDSA-AES128-SHA256`
|
||||||
|
`ECDHE-ECDSA-AES256-SHA384`
|
||||||
|
`ECDHE-ECDSA-AES128-GCM-SHA256`
|
||||||
|
`ECDHE-ECDSA-AES256-GCM-SHA384`
|
||||||
|
`ADH-AES128-SHA256`
|
||||||
|
`ADH-AES256-SHA256`
|
||||||
|
`ADH-AES128-GCM-SHA256`
|
||||||
|
`ADH-AES256-GCM-SHA384`
|
||||||
|
`AES128-CCM`
|
||||||
|
`AES256-CCM`
|
||||||
|
`DHE-RSA-AES128-CCM`
|
||||||
|
`DHE-RSA-AES256-CCM`
|
||||||
|
`AES128-CCM8`
|
||||||
|
`AES256-CCM8`
|
||||||
|
`DHE-RSA-AES128-CCM8`
|
||||||
|
`DHE-RSA-AES256-CCM8`
|
||||||
|
`ECDHE-ECDSA-AES128-CCM`
|
||||||
|
`ECDHE-ECDSA-AES256-CCM`
|
||||||
|
`ECDHE-ECDSA-AES128-CCM8`
|
||||||
|
`ECDHE-ECDSA-AES256-CCM8`
|
||||||
|
|
||||||
|
### Camellia HMAC-Based cipher suites from RFC 6367, extending TLS v1.2
|
||||||
|
|
||||||
|
`ECDHE-ECDSA-CAMELLIA128-SHA256`
|
||||||
|
`ECDHE-ECDSA-CAMELLIA256-SHA384`
|
||||||
|
`ECDHE-RSA-CAMELLIA128-SHA256`
|
||||||
|
`ECDHE-RSA-CAMELLIA256-SHA384`
|
||||||
|
|
||||||
|
### TLS 1.3 cipher suites
|
||||||
|
|
||||||
|
(Note these ciphers are set with `CURLOPT_TLS13_CIPHERS` and `--tls13-ciphers`)
|
||||||
|
|
||||||
|
`TLS_AES_256_GCM_SHA384`
|
||||||
|
`TLS_CHACHA20_POLY1305_SHA256`
|
||||||
|
`TLS_AES_128_GCM_SHA256`
|
||||||
|
`TLS_AES_128_CCM_8_SHA256`
|
||||||
|
`TLS_AES_128_CCM_SHA256`
|
||||||
|
|
||||||
|
## WolfSSL
|
||||||
|
|
||||||
|
`RC4-SHA`,
|
||||||
|
`RC4-MD5`,
|
||||||
|
`DES-CBC3-SHA`,
|
||||||
|
`AES128-SHA`,
|
||||||
|
`AES256-SHA`,
|
||||||
|
`NULL-SHA`,
|
||||||
|
`NULL-SHA256`,
|
||||||
|
`DHE-RSA-AES128-SHA`,
|
||||||
|
`DHE-RSA-AES256-SHA`,
|
||||||
|
`DHE-PSK-AES256-GCM-SHA384`,
|
||||||
|
`DHE-PSK-AES128-GCM-SHA256`,
|
||||||
|
`PSK-AES256-GCM-SHA384`,
|
||||||
|
`PSK-AES128-GCM-SHA256`,
|
||||||
|
`DHE-PSK-AES256-CBC-SHA384`,
|
||||||
|
`DHE-PSK-AES128-CBC-SHA256`,
|
||||||
|
`PSK-AES256-CBC-SHA384`,
|
||||||
|
`PSK-AES128-CBC-SHA256`,
|
||||||
|
`PSK-AES128-CBC-SHA`,
|
||||||
|
`PSK-AES256-CBC-SHA`,
|
||||||
|
`DHE-PSK-AES128-CCM`,
|
||||||
|
`DHE-PSK-AES256-CCM`,
|
||||||
|
`PSK-AES128-CCM`,
|
||||||
|
`PSK-AES256-CCM`,
|
||||||
|
`PSK-AES128-CCM-8`,
|
||||||
|
`PSK-AES256-CCM-8`,
|
||||||
|
`DHE-PSK-NULL-SHA384`,
|
||||||
|
`DHE-PSK-NULL-SHA256`,
|
||||||
|
`PSK-NULL-SHA384`,
|
||||||
|
`PSK-NULL-SHA256`,
|
||||||
|
`PSK-NULL-SHA`,
|
||||||
|
`HC128-MD5`,
|
||||||
|
`HC128-SHA`,
|
||||||
|
`HC128-B2B256`,
|
||||||
|
`AES128-B2B256`,
|
||||||
|
`AES256-B2B256`,
|
||||||
|
`RABBIT-SHA`,
|
||||||
|
`NTRU-RC4-SHA`,
|
||||||
|
`NTRU-DES-CBC3-SHA`,
|
||||||
|
`NTRU-AES128-SHA`,
|
||||||
|
`NTRU-AES256-SHA`,
|
||||||
|
`AES128-CCM-8`,
|
||||||
|
`AES256-CCM-8`,
|
||||||
|
`ECDHE-ECDSA-AES128-CCM`,
|
||||||
|
`ECDHE-ECDSA-AES128-CCM-8`,
|
||||||
|
`ECDHE-ECDSA-AES256-CCM-8`,
|
||||||
|
`ECDHE-RSA-AES128-SHA`,
|
||||||
|
`ECDHE-RSA-AES256-SHA`,
|
||||||
|
`ECDHE-ECDSA-AES128-SHA`,
|
||||||
|
`ECDHE-ECDSA-AES256-SHA`,
|
||||||
|
`ECDHE-RSA-RC4-SHA`,
|
||||||
|
`ECDHE-RSA-DES-CBC3-SHA`,
|
||||||
|
`ECDHE-ECDSA-RC4-SHA`,
|
||||||
|
`ECDHE-ECDSA-DES-CBC3-SHA`,
|
||||||
|
`AES128-SHA256`,
|
||||||
|
`AES256-SHA256`,
|
||||||
|
`DHE-RSA-AES128-SHA256`,
|
||||||
|
`DHE-RSA-AES256-SHA256`,
|
||||||
|
`ECDH-RSA-AES128-SHA`,
|
||||||
|
`ECDH-RSA-AES256-SHA`,
|
||||||
|
`ECDH-ECDSA-AES128-SHA`,
|
||||||
|
`ECDH-ECDSA-AES256-SHA`,
|
||||||
|
`ECDH-RSA-RC4-SHA`,
|
||||||
|
`ECDH-RSA-DES-CBC3-SHA`,
|
||||||
|
`ECDH-ECDSA-RC4-SHA`,
|
||||||
|
`ECDH-ECDSA-DES-CBC3-SHA`,
|
||||||
|
`AES128-GCM-SHA256`,
|
||||||
|
`AES256-GCM-SHA384`,
|
||||||
|
`DHE-RSA-AES128-GCM-SHA256`,
|
||||||
|
`DHE-RSA-AES256-GCM-SHA384`,
|
||||||
|
`ECDHE-RSA-AES128-GCM-SHA256`,
|
||||||
|
`ECDHE-RSA-AES256-GCM-SHA384`,
|
||||||
|
`ECDHE-ECDSA-AES128-GCM-SHA256`,
|
||||||
|
`ECDHE-ECDSA-AES256-GCM-SHA384`,
|
||||||
|
`ECDH-RSA-AES128-GCM-SHA256`,
|
||||||
|
`ECDH-RSA-AES256-GCM-SHA384`,
|
||||||
|
`ECDH-ECDSA-AES128-GCM-SHA256`,
|
||||||
|
`ECDH-ECDSA-AES256-GCM-SHA384`,
|
||||||
|
`CAMELLIA128-SHA`,
|
||||||
|
`DHE-RSA-CAMELLIA128-SHA`,
|
||||||
|
`CAMELLIA256-SHA`,
|
||||||
|
`DHE-RSA-CAMELLIA256-SHA`,
|
||||||
|
`CAMELLIA128-SHA256`,
|
||||||
|
`DHE-RSA-CAMELLIA128-SHA256`,
|
||||||
|
`CAMELLIA256-SHA256`,
|
||||||
|
`DHE-RSA-CAMELLIA256-SHA256`,
|
||||||
|
`ECDHE-RSA-AES128-SHA256`,
|
||||||
|
`ECDHE-ECDSA-AES128-SHA256`,
|
||||||
|
`ECDH-RSA-AES128-SHA256`,
|
||||||
|
`ECDH-ECDSA-AES128-SHA256`,
|
||||||
|
`ECDHE-RSA-AES256-SHA384`,
|
||||||
|
`ECDHE-ECDSA-AES256-SHA384`,
|
||||||
|
`ECDH-RSA-AES256-SHA384`,
|
||||||
|
`ECDH-ECDSA-AES256-SHA384`,
|
||||||
|
`ECDHE-RSA-CHACHA20-POLY1305`,
|
||||||
|
`ECDHE-ECDSA-CHACHA20-POLY1305`,
|
||||||
|
`DHE-RSA-CHACHA20-POLY1305`,
|
||||||
|
`ECDHE-RSA-CHACHA20-POLY1305-OLD`,
|
||||||
|
`ECDHE-ECDSA-CHACHA20-POLY1305-OLD`,
|
||||||
|
`DHE-RSA-CHACHA20-POLY1305-OLD`,
|
||||||
|
`ADH-AES128-SHA`,
|
||||||
|
`QSH`,
|
||||||
|
`RENEGOTIATION-INFO`,
|
||||||
|
`IDEA-CBC-SHA`,
|
||||||
|
`ECDHE-ECDSA-NULL-SHA`,
|
||||||
|
`ECDHE-PSK-NULL-SHA256`,
|
||||||
|
`ECDHE-PSK-AES128-CBC-SHA256`,
|
||||||
|
`PSK-CHACHA20-POLY1305`,
|
||||||
|
`ECDHE-PSK-CHACHA20-POLY1305`,
|
||||||
|
`DHE-PSK-CHACHA20-POLY1305`,
|
||||||
|
`EDH-RSA-DES-CBC3-SHA`,
|
||||||
|
|
||||||
|
## Schannel
|
||||||
|
|
||||||
|
Schannel allows the enabling and disabling of encryption algorithms, but not
|
||||||
|
specific cipher suites, prior to TLS 1.3. The algorithms are
|
||||||
|
[defined](https://docs.microsoft.com/windows/desktop/SecCrypto/alg-id) by
|
||||||
|
Microsoft.
|
||||||
|
|
||||||
|
The algorithms below are for TLS 1.2 and earlier. TLS 1.3 is covered in the
|
||||||
|
next section.
|
||||||
|
|
||||||
|
There is also the case that the selected algorithm is not supported by the
|
||||||
|
protocol or does not match the ciphers offered by the server during the SSL
|
||||||
|
negotiation. In this case curl returns error
|
||||||
|
`CURLE_SSL_CONNECT_ERROR (35) SEC_E_ALGORITHM_MISMATCH`
|
||||||
|
and the request fails.
|
||||||
|
|
||||||
|
`CALG_MD2`,
|
||||||
|
`CALG_MD4`,
|
||||||
|
`CALG_MD5`,
|
||||||
|
`CALG_SHA`,
|
||||||
|
`CALG_SHA1`,
|
||||||
|
`CALG_MAC`,
|
||||||
|
`CALG_RSA_SIGN`,
|
||||||
|
`CALG_DSS_SIGN`,
|
||||||
|
`CALG_NO_SIGN`,
|
||||||
|
`CALG_RSA_KEYX`,
|
||||||
|
`CALG_DES`,
|
||||||
|
`CALG_3DES_112`,
|
||||||
|
`CALG_3DES`,
|
||||||
|
`CALG_DESX`,
|
||||||
|
`CALG_RC2`,
|
||||||
|
`CALG_RC4`,
|
||||||
|
`CALG_SEAL`,
|
||||||
|
`CALG_DH_SF`,
|
||||||
|
`CALG_DH_EPHEM`,
|
||||||
|
`CALG_AGREEDKEY_ANY`,
|
||||||
|
`CALG_HUGHES_MD5`,
|
||||||
|
`CALG_SKIPJACK`,
|
||||||
|
`CALG_TEK`,
|
||||||
|
`CALG_CYLINK_MEK`,
|
||||||
|
`CALG_SSL3_SHAMD5`,
|
||||||
|
`CALG_SSL3_MASTER`,
|
||||||
|
`CALG_SCHANNEL_MASTER_HASH`,
|
||||||
|
`CALG_SCHANNEL_MAC_KEY`,
|
||||||
|
`CALG_SCHANNEL_ENC_KEY`,
|
||||||
|
`CALG_PCT1_MASTER`,
|
||||||
|
`CALG_SSL2_MASTER`,
|
||||||
|
`CALG_TLS1_MASTER`,
|
||||||
|
`CALG_RC5`,
|
||||||
|
`CALG_HMAC`,
|
||||||
|
`CALG_TLS1PRF`,
|
||||||
|
`CALG_HASH_REPLACE_OWF`,
|
||||||
|
`CALG_AES_128`,
|
||||||
|
`CALG_AES_192`,
|
||||||
|
`CALG_AES_256`,
|
||||||
|
`CALG_AES`,
|
||||||
|
`CALG_SHA_256`,
|
||||||
|
`CALG_SHA_384`,
|
||||||
|
`CALG_SHA_512`,
|
||||||
|
`CALG_ECDH`,
|
||||||
|
`CALG_ECMQV`,
|
||||||
|
`CALG_ECDSA`,
|
||||||
|
`CALG_ECDH_EPHEM`,
|
||||||
|
|
||||||
|
As of curl 7.77.0, you can also pass `SCH_USE_STRONG_CRYPTO` as a cipher name
|
||||||
|
to [constrain the set of available ciphers as specified in the Schannel
|
||||||
|
documentation](https://docs.microsoft.com/en-us/windows/win32/secauthn/tls-cipher-suites-in-windows-server-2022).
|
||||||
|
Note that the supported ciphers in this case follow the OS version, so if you
|
||||||
|
are running an outdated OS you might still be supporting weak ciphers.
|
||||||
|
|
||||||
|
### TLS 1.3 cipher suites
|
||||||
|
|
||||||
|
You can set TLS 1.3 ciphers for Schannel by using `CURLOPT_TLS13_CIPHERS` or
|
||||||
|
`--tls13-ciphers` with the names below.
|
||||||
|
|
||||||
|
If TLS 1.3 cipher suites are set then libcurl adds or restricts Schannel TLS
|
||||||
|
1.3 algorithms automatically. Essentially, libcurl is emulating support for
|
||||||
|
individual TLS 1.3 cipher suites since Schannel does not support it directly.
|
||||||
|
|
||||||
|
`TLS_AES_256_GCM_SHA384`
|
||||||
|
`TLS_AES_128_GCM_SHA256`
|
||||||
|
`TLS_CHACHA20_POLY1305_SHA256`
|
||||||
|
`TLS_AES_128_CCM_8_SHA256`
|
||||||
|
`TLS_AES_128_CCM_SHA256`
|
||||||
|
|
||||||
|
Note if you set TLS 1.3 ciphers without also setting the minimum TLS version
|
||||||
|
to 1.3 then it is possible Schannel may negotiate an earlier TLS version and
|
||||||
|
cipher suite if your libcurl and OS settings allow it. You can set the minimum
|
||||||
|
TLS version by using `CURLOPT_SSLVERSION` or `--tlsv1.3`.
|
||||||
|
|
||||||
|
## BearSSL
|
||||||
|
|
||||||
|
BearSSL ciphers can be specified by either the OpenSSL name (`ECDHE-RSA-AES128-GCM-SHA256`) or the IANA name (`TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256`).
|
||||||
|
|
||||||
|
Since BearSSL 0.1:
|
||||||
|
|
||||||
|
`DES-CBC3-SHA`
|
||||||
|
`AES128-SHA`
|
||||||
|
`AES256-SHA`
|
||||||
|
`AES128-SHA256`
|
||||||
|
`AES256-SHA256`
|
||||||
|
`AES128-GCM-SHA256`
|
||||||
|
`AES256-GCM-SHA384`
|
||||||
|
`ECDH-ECDSA-DES-CBC3-SHA`
|
||||||
|
`ECDH-ECDSA-AES128-SHA`
|
||||||
|
`ECDH-ECDSA-AES256-SHA`
|
||||||
|
`ECDHE-ECDSA-DES-CBC3-SHA`
|
||||||
|
`ECDHE-ECDSA-AES128-SHA`
|
||||||
|
`ECDHE-ECDSA-AES256-SHA`
|
||||||
|
`ECDH-RSA-DES-CBC3-SHA`
|
||||||
|
`ECDH-RSA-AES128-SHA`
|
||||||
|
`ECDH-RSA-AES256-SHA`
|
||||||
|
`ECDHE-RSA-DES-CBC3-SHA`
|
||||||
|
`ECDHE-RSA-AES128-SHA`
|
||||||
|
`ECDHE-RSA-AES256-SHA`
|
||||||
|
`ECDHE-ECDSA-AES128-SHA256`
|
||||||
|
`ECDHE-ECDSA-AES256-SHA384`
|
||||||
|
`ECDH-ECDSA-AES128-SHA256`
|
||||||
|
`ECDH-ECDSA-AES256-SHA384`
|
||||||
|
`ECDHE-RSA-AES128-SHA256`
|
||||||
|
`ECDHE-RSA-AES256-SHA384`
|
||||||
|
`ECDH-RSA-AES128-SHA256`
|
||||||
|
`ECDH-RSA-AES256-SHA384`
|
||||||
|
`ECDHE-ECDSA-AES128-GCM-SHA256`
|
||||||
|
`ECDHE-ECDSA-AES256-GCM-SHA384`
|
||||||
|
`ECDH-ECDSA-AES128-GCM-SHA256`
|
||||||
|
`ECDH-ECDSA-AES256-GCM-SHA384`
|
||||||
|
`ECDHE-RSA-AES128-GCM-SHA256`
|
||||||
|
`ECDHE-RSA-AES256-GCM-SHA384`
|
||||||
|
`ECDH-RSA-AES128-GCM-SHA256`
|
||||||
|
`ECDH-RSA-AES256-GCM-SHA384`
|
||||||
|
|
||||||
|
Since BearSSL 0.2:
|
||||||
|
|
||||||
|
`ECDHE-RSA-CHACHA20-POLY1305`
|
||||||
|
`ECDHE-ECDSA-CHACHA20-POLY1305`
|
||||||
|
|
||||||
|
Since BearSSL 0.6:
|
||||||
|
|
||||||
|
`AES128-CCM`
|
||||||
|
`AES256-CCM`
|
||||||
|
`AES128-CCM8`
|
||||||
|
`AES256-CCM8`
|
||||||
|
`ECDHE-ECDSA-AES128-CCM`
|
||||||
|
`ECDHE-ECDSA-AES256-CCM`
|
||||||
|
`ECDHE-ECDSA-AES128-CCM8`
|
||||||
|
`ECDHE-ECDSA-AES256-CCM8`
|
132
src/dependencies/curl-8.8.0/docs/CLIENT-READERS.md
Normal file
132
src/dependencies/curl-8.8.0/docs/CLIENT-READERS.md
Normal file
|
@ -0,0 +1,132 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curl client readers
|
||||||
|
|
||||||
|
Client readers are a design in the internals of libcurl, not visible in its public API. They were introduced
|
||||||
|
in curl v8.7.0. This document describes the concept, its high-level implementation and the motivation behind it.
|
||||||
|
|
||||||
|
## Naming
|
||||||
|
|
||||||
|
`libcurl` operates between clients and servers. A *client* is the application using libcurl, like the command line tool `curl` itself. Data to be uploaded to a server is **read** from the client and **sent** to the server; the server's response is **received** by `libcurl` and then **written** to the client.
|
||||||
|
|
||||||
|
With this naming established, client readers are concerned with providing data from the application to the server. Applications register callbacks via `CURLOPT_READFUNCTION`, data via `CURLOPT_POSTFIELDS` and other options to be used by `libcurl` when the request is sent.
|
||||||
|
|
||||||
|
## Invoking
|
||||||
|
|
||||||
|
The transfer loop that sends and receives uses `Curl_client_read()` to get more data to send for a transfer. If no specific reader has been installed yet, the default one that uses `CURLOPT_READFUNCTION` is added. The prototype is
|
||||||
|
|
||||||
|
```
|
||||||
|
CURLcode Curl_client_read(struct Curl_easy *data, char *buf, size_t blen,
|
||||||
|
size_t *nread, bool *eos);
|
||||||
|
```
|
||||||
|
The arguments are the transfer to read for, a buffer to hold the read data, its length, the actual number of bytes placed into the buffer and the `eos` (*end of stream*) flag indicating that no more data is available. The `eos` flag may be set together with a read amount if that amount was the last. That way curl can avoid reading an additional time.
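
As a rough illustration, a sending loop built on this call could look like the following. This is a simplified sketch using libcurl-internal types, not the actual transfer code; `data` is the transfer's easy handle and the send step is left as a comment.

```
/* simplified sketch, not the real transfer loop */
static CURLcode send_all(struct Curl_easy *data)
{
  char buf[16384];
  size_t nread;
  bool eos = FALSE;

  while(!eos) {
    CURLcode result = Curl_client_read(data, buf, sizeof(buf),
                                       &nread, &eos);
    if(result)
      return result;
    /* ... hand the nread bytes in buf to the connection for sending ... */
  }
  return CURLE_OK;
}
```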
|
||||||
|
|
||||||
|
The implementation of `Curl_client_read()` uses a chain of *client reader* instances to get the data. This is similar to the design of *client writers*. The chain of readers allows processing of the data to send.
|
||||||
|
|
||||||
|
The definition of a reader is:
|
||||||
|
|
||||||
|
```
|
||||||
|
struct Curl_crtype {
|
||||||
|
const char *name; /* writer name. */
|
||||||
|
CURLcode (*do_init)(struct Curl_easy *data, struct Curl_creader *writer);
|
||||||
|
CURLcode (*do_read)(struct Curl_easy *data, struct Curl_creader *reader,
|
||||||
|
char *buf, size_t blen, size_t *nread, bool *eos);
|
||||||
|
void (*do_close)(struct Curl_easy *data, struct Curl_creader *reader);
|
||||||
|
bool (*needs_rewind)(struct Curl_easy *data, struct Curl_creader *reader);
|
||||||
|
curl_off_t (*total_length)(struct Curl_easy *data,
|
||||||
|
struct Curl_creader *reader);
|
||||||
|
CURLcode (*resume_from)(struct Curl_easy *data,
|
||||||
|
struct Curl_creader *reader, curl_off_t offset);
|
||||||
|
CURLcode (*rewind)(struct Curl_easy *data, struct Curl_creader *reader);
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Curl_creader {
|
||||||
|
const struct Curl_crtype *crt; /* type implementation */
|
||||||
|
struct Curl_creader *next; /* Downstream reader. */
|
||||||
|
Curl_creader_phase phase; /* phase at which it operates */
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
`Curl_creader` is a reader instance with a `next` pointer to form the chain. It has a type `crt` which provides the implementation. The main callback is `do_read()` which provides the data to the caller. The others are for setup and tear down. `needs_rewind()` is explained further below.
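
To make the shape of this concrete, here is an illustrative reader type modeled on the `null` reader described further below. All names are hypothetical, not actual libcurl symbols, and leaving the unused callbacks as `NULL` is an assumption made for brevity; libcurl's own readers typically set every member.

```
/* illustrative only: hypothetical names, not actual libcurl symbols */
static CURLcode cr_example_init(struct Curl_easy *data,
                                struct Curl_creader *reader)
{
  (void)data;
  (void)reader;
  return CURLE_OK;
}

static CURLcode cr_example_read(struct Curl_easy *data,
                                struct Curl_creader *reader,
                                char *buf, size_t blen,
                                size_t *nread, bool *eos)
{
  (void)data;
  (void)reader;
  (void)buf;
  (void)blen;
  *nread = 0;    /* deliver no bytes... */
  *eos = TRUE;   /* ...and signal end-of-stream right away */
  return CURLE_OK;
}

static void cr_example_close(struct Curl_easy *data,
                             struct Curl_creader *reader)
{
  (void)data;
  (void)reader;
}

static const struct Curl_crtype cr_example = {
  "example-null",   /* name */
  cr_example_init,  /* do_init */
  cr_example_read,  /* do_read */
  cr_example_close, /* do_close */
  NULL,             /* needs_rewind: nothing read, nothing to rewind */
  NULL,             /* total_length */
  NULL,             /* resume_from */
  NULL              /* rewind */
};
```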
|
||||||
|
|
||||||
|
## Phases and Ordering
|
||||||
|
|
||||||
|
Since client readers may transform the data being read through the chain, the order in which they are called is relevant for the outcome. When a reader is created, it gets the `phase` property in which it operates. Reader phases are defined like:
|
||||||
|
|
||||||
|
```
|
||||||
|
typedef enum {
|
||||||
|
CURL_CR_NET, /* data send to the network (connection filters) */
|
||||||
|
CURL_CR_TRANSFER_ENCODE, /* add transfer-encodings */
|
||||||
|
CURL_CR_PROTOCOL, /* before transfer, but after content decoding */
|
||||||
|
CURL_CR_CONTENT_ENCODE, /* add content-encodings */
|
||||||
|
CURL_CR_CLIENT /* data read from client */
|
||||||
|
} Curl_creader_phase;
|
||||||
|
```
|
||||||
|
|
||||||
|
If a reader for phase `PROTOCOL` is added to the chain, it is always added *after* any `NET` or `TRANSFER_ENCODE` readers and *before* any `CONTENT_ENCODE` and `CLIENT` readers. If there is already a reader for the same phase, the new reader is added before the existing one(s).
|
||||||
|
|
||||||
|
### Example: `chunked` reader
|
||||||
|
|
||||||
|
In `http_chunks.c` a client reader for chunked uploads is implemented. This one operates at phase `CURL_CR_TRANSFER_ENCODE`. Any data coming from the reader "below" has the HTTP/1.1 chunk handling applied and returned to the caller.
|
||||||
|
|
||||||
|
When this reader sees an `eos` from below, it generates the terminal chunk, adding trailers if provided by the application. When that last chunk is fully returned, it also sets `eos` to the caller.
|
||||||
|
|
||||||
|
### Example: `lineconv` reader
|
||||||
|
|
||||||
|
In `sendf.c` a client reader that does line-end conversions is implemented. It operates at `CURL_CR_CONTENT_ENCODE` and converts any "\n" to "\r\n". This is used for FTP ASCII uploads or when the general `crlf` option has been set.
|
||||||
|
|
||||||
|
### Example: `null` reader
|
||||||
|
|
||||||
|
Implemented in `sendf.c` for phase `CURL_CR_CLIENT`, this reader has the simple job of providing transfer bytes of length 0 to the caller, immediately indicating an `eos`. This reader is installed by HTTP for all GET/HEAD requests and when authentication is being negotiated.
|
||||||
|
|
||||||
|
### Example: `buf` reader
|
||||||
|
|
||||||
|
Implemented in `sendf.c` for phase `CURL_CR_CLIENT`, this reader gets a buffer pointer and a length and provides exactly these bytes. This one is used in HTTP for sending `postfields` provided by the application.
|
||||||
|
|
||||||
|
## Request retries
|
||||||
|
|
||||||
|
Sometimes it is necessary to send a request with client data again. Transfer handling can inquire via `Curl_client_read_needs_rewind()` if a rewind (e.g. a reset of the client data) is necessary. This asks all installed readers if they need it and gives `FALSE` if none does.
|
||||||
|
|
||||||
|
## Upload Size
|
||||||
|
|
||||||
|
Many protocols need to know the number of bytes delivered by the client readers in advance. They may invoke `Curl_creader_total_length(data)` to retrieve that. However, not all reader chains know the exact value beforehand. In that case, the call returns `-1` for "unknown".
|
||||||
|
|
||||||
|
Even if the length of the "raw" data is known, the length that is sent may not be. Example: with option `--crlf` the uploaded content undergoes line-end conversion. The line converting reader does not know in advance how many newlines it may encounter. Therefore it must return `-1` for any positive raw content length.
|
||||||
|
|
||||||
|
In HTTP, once the correct client readers are installed, the protocol asks the readers for the total length. If that is known, it can set `Content-Length:` accordingly. If not, it may choose to add an HTTP "chunked" reader.
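
A simplified sketch of that decision, using the documented call (not the actual `lib/http.c` code; the header handling is only indicated in comments):

```
/* simplified sketch of the Content-Length vs. chunked decision */
curl_off_t req_clen = Curl_creader_total_length(data);

if(req_clen >= 0) {
  /* the reader chain knows the exact size: announce it */
  /* -> add "Content-Length: <req_clen>" to the request headers */
}
else {
  /* size unknown: upload chunked instead */
  /* -> add "Transfer-Encoding: chunked" and install the chunked reader */
}
```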
|
||||||
|
|
||||||
|
In addition, there is `Curl_creader_client_length(data)` which gives the total length as reported by the reader in phase `CURL_CR_CLIENT` without asking other readers that may transform the raw data. This is useful in estimating the size of an upload. The HTTP protocol uses this to determine if `Expect: 100-continue` shall be done.
|
||||||
|
|
||||||
|
## Resuming
|
||||||
|
|
||||||
|
Uploads can start at a specific offset, if so requested. They then "resume from" that offset. This applies to the reader in phase `CURL_CR_CLIENT` that delivers the "raw" content. Resumption can fail if the installed reader does not support it or if the offset is too large.
|
||||||
|
|
||||||
|
The total length reported by the reader changes when resuming. Example: resuming an upload of 100 bytes at offset 25 reports a total length of 75 afterwards.
|
||||||
|
|
||||||
|
If `resume_from()` is invoked twice, it is additive. There is currently no way to undo a resume.
|
||||||
|
|
||||||
|
## Rewinding
|
||||||
|
|
||||||
|
When a request is retried, installed client readers are discarded and replaced by new ones. This works only if the new readers upload the same data. For many readers, this is not an issue. The "null" reader always does the same. Also the `buf` reader, initialized with the same buffer, does this.
|
||||||
|
|
||||||
|
Readers operating on callbacks to the application need to "rewind" the underlying content. For example, when reading from a `FILE*`, the reader needs to `fseek()` to the beginning. The following methods are used:
|
||||||
|
|
||||||
|
1. `Curl_creader_needs_rewind(data)`: tells if a rewind is necessary, given the current state of the reader chain. If nothing really has been read so far, this returns `FALSE`.
|
||||||
|
2. `Curl_creader_will_rewind(data)`: tells if the reader chain rewinds at the start of the next request.
|
||||||
|
3. `Curl_creader_set_rewind(data, TRUE)`: marks the reader chain for rewinding at the start of the next request.
|
||||||
|
4. `Curl_client_start(data)`: tells the readers that a new request starts and they need to rewind if requested.
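
Put together, a retry path could use these roughly as sketched below. This is a simplified illustration; error handling and the real call sites are omitted.

```
/* simplified sketch of preparing a retry of the same request */
if(Curl_creader_needs_rewind(data)) {
  /* data was already consumed: ask the chain to rewind when the
     follow-up request starts */
  Curl_creader_set_rewind(data, TRUE);
}

/* ... later, when the follow-up request is actually started ... */
Curl_client_start(data);   /* readers rewind now, if requested */
```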
|
||||||
|
|
||||||
|
|
||||||
|
## Summary and Outlook
|
||||||
|
|
||||||
|
By adding the client reader interface, any protocol can control how/if it wants the curl transfer to send bytes for a request. The transfer loop then becomes blissfully ignorant of the specifics.
|
||||||
|
|
||||||
|
The protocols, on the other hand, no longer have to worry about packaging data most efficiently. At any time, should more data be needed, it can be read from the client. This is used when sending HTTP request headers to add as much request body data to the initial send as there is room for.
|
||||||
|
|
||||||
|
Future enhancements based on the client readers:
|
||||||
|
* `expect-100` handling: place that into an HTTP-specific reader at `CURL_CR_PROTOCOL` and eliminate the checks in the generic transfer parts.
|
||||||
|
* `eos forwarding`: transfer should forward an `eos` flag to the connection filters. Filters like HTTP/2 and HTTP/3 can make use of that, terminating streams early. This would also eliminate length checks in stream handling.
|
123
src/dependencies/curl-8.8.0/docs/CLIENT-WRITERS.md
Normal file
123
src/dependencies/curl-8.8.0/docs/CLIENT-WRITERS.md
Normal file
|
@ -0,0 +1,123 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curl client writers
|
||||||
|
|
||||||
|
Client writers are a design in the internals of libcurl, not visible in its public API. They were introduced
|
||||||
|
in curl v8.5.0. This document describes the concept, its high-level implementation and the motivation behind it.
|
||||||
|
|
||||||
|
## Naming
|
||||||
|
|
||||||
|
`libcurl` operates between clients and servers. A *client* is the application using libcurl, like the command line tool `curl` itself. Data to be uploaded to a server is **read** from the client and **sent** to the server; the server's response is **received** by `libcurl` and then **written** to the client.
|
||||||
|
|
||||||
|
With this naming established, client writers are concerned with writing responses from the server to the application. Applications register callbacks via `CURLOPT_WRITEFUNCTION` and `CURLOPT_HEADERFUNCTION` to be invoked by `libcurl` when the response is received.
|
||||||
|
|
||||||
|
## Invoking
|
||||||
|
|
||||||
|
All code in `libcurl` that handles response data is ultimately expected to forward this data via `Curl_client_write()` to the application. The exact prototype of this function is:
|
||||||
|
|
||||||
|
```
|
||||||
|
CURLcode Curl_client_write(struct Curl_easy *data, int type, const char *buf, size_t blen);
|
||||||
|
```
|
||||||
|
The `type` argument specifies what the bytes in `buf` actually are. The following bits are defined:
|
||||||
|
|
||||||
|
```
|
||||||
|
#define CLIENTWRITE_BODY (1<<0) /* non-meta information, BODY */
|
||||||
|
#define CLIENTWRITE_INFO (1<<1) /* meta information, not a HEADER */
|
||||||
|
#define CLIENTWRITE_HEADER (1<<2) /* meta information, HEADER */
|
||||||
|
#define CLIENTWRITE_STATUS (1<<3) /* a special status HEADER */
|
||||||
|
#define CLIENTWRITE_CONNECT (1<<4) /* a CONNECT related HEADER */
|
||||||
|
#define CLIENTWRITE_1XX (1<<5) /* a 1xx response related HEADER */
|
||||||
|
#define CLIENTWRITE_TRAILER (1<<6) /* a trailer HEADER */
|
||||||
|
```
|
||||||
|
|
||||||
|
The main types here are `CLIENTWRITE_BODY` and `CLIENTWRITE_HEADER`. They are
|
||||||
|
mutually exclusive. The other bits are enhancements to `CLIENTWRITE_HEADER` to
|
||||||
|
specify what the header is about. They are only used in HTTP and related
|
||||||
|
protocols (RTSP and WebSocket).
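
For example, protocol code that has a piece of decoded body data and a complete response header line at hand would forward them like this. This is a sketch only: `buf`, `blen`, `hd` and `hdlen` are placeholder variables and `data` is the transfer's easy handle.

```
/* sketch: forwarding received data to the client writer chain */
CURLcode result;

/* a piece of response body */
result = Curl_client_write(data, CLIENTWRITE_BODY, buf, blen);
if(result)
  return result;

/* a complete response header line */
result = Curl_client_write(data, CLIENTWRITE_HEADER, hd, hdlen);
if(result)
  return result;
```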
|
||||||
|
|
||||||
|
The implementation of `Curl_client_write()` uses a chain of *client writer* instances to process the call and make sure that the bytes reach the proper application callbacks. This is similar to the design of connection filters: client writers can be chained to process the bytes written through them. The definition is:
|
||||||
|
|
||||||
|
```
|
||||||
|
struct Curl_cwtype {
|
||||||
|
const char *name;
|
||||||
|
CURLcode (*do_init)(struct Curl_easy *data,
|
||||||
|
struct Curl_cwriter *writer);
|
||||||
|
CURLcode (*do_write)(struct Curl_easy *data,
|
||||||
|
struct Curl_cwriter *writer, int type,
|
||||||
|
const char *buf, size_t nbytes);
|
||||||
|
void (*do_close)(struct Curl_easy *data,
|
||||||
|
struct Curl_cwriter *writer);
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Curl_cwriter {
|
||||||
|
const struct Curl_cwtype *cwt; /* type implementation */
|
||||||
|
struct Curl_cwriter *next; /* Downstream writer. */
|
||||||
|
Curl_cwriter_phase phase; /* phase at which it operates */
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
`Curl_cwriter` is a writer instance with a `next` pointer to form the chain. It has a type `cwt` which provides the implementation. The main callback is `do_write()` that processes the data and then calls the `next` writer. The others are for setup and tear down.
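
For illustration, a minimal pass-through writer could look like the sketch below. The names are hypothetical, and the next writer is invoked directly through the documented struct members here purely for clarity; real libcurl writers typically go through small helper functions instead.

```
/* illustrative only: a writer that passes everything on unchanged */
static CURLcode cw_example_init(struct Curl_easy *data,
                                struct Curl_cwriter *writer)
{
  (void)data;
  (void)writer;
  return CURLE_OK;
}

static CURLcode cw_example_write(struct Curl_easy *data,
                                 struct Curl_cwriter *writer, int type,
                                 const char *buf, size_t nbytes)
{
  /* a real writer would inspect or transform the bytes here */
  if(writer->next)
    return writer->next->cwt->do_write(data, writer->next, type,
                                       buf, nbytes);
  return CURLE_OK;
}

static void cw_example_close(struct Curl_easy *data,
                             struct Curl_cwriter *writer)
{
  (void)data;
  (void)writer;
}

static const struct Curl_cwtype cw_example = {
  "example-passthrough", /* name */
  cw_example_init,       /* do_init */
  cw_example_write,      /* do_write */
  cw_example_close       /* do_close */
};
```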
|
||||||
|
|
||||||
|
## Phases and Ordering
|
||||||
|
|
||||||
|
Since client writers may transform the bytes written through them, the order in which they are called is relevant for the outcome. When a writer is created, one property it gets is the `phase` in which it operates. Writer phases are defined like:
|
||||||
|
|
||||||
|
```
|
||||||
|
typedef enum {
|
||||||
|
CURL_CW_RAW, /* raw data written, before any decoding */
|
||||||
|
CURL_CW_TRANSFER_DECODE, /* remove transfer-encodings */
|
||||||
|
CURL_CW_PROTOCOL, /* after transfer, but before content decoding */
|
||||||
|
CURL_CW_CONTENT_DECODE, /* remove content-encodings */
|
||||||
|
CURL_CW_CLIENT /* data written to client */
|
||||||
|
} Curl_cwriter_phase;
|
||||||
|
```
|
||||||
|
|
||||||
|
If a writer for phase `PROTOCOL` is added to the chain, it is always added *after* any `RAW` or `TRANSFER_DECODE` and *before* any `CONTENT_DECODE` and `CLIENT` phase writer. If there is already a writer for the same phase present, the new writer is inserted just before that one.
|
||||||
|
|
||||||
|
All transfers have a chain of 3 writers by default. A specific protocol handler may alter that by adding additional writers. The 3 standard writers are (name, phase):
|
||||||
|
|
||||||
|
1. `"raw", CURL_CW_RAW `: if the transfer is verbose, it forwards the body data to the debug function.
|
||||||
|
1. `"download", CURL_CW_PROTOCOL`: checks that protocol limits are kept and updates progress counters. When a download has a known length, it checks that it is not exceeded and errors otherwise.
|
||||||
|
1. `"client", CURL_CW_CLIENT`: the main work horse. It invokes the application callbacks or writes to the configured file handles. It chops large writes into smaller parts, as documented for `CURLOPT_WRITEFUNCTION`. If also handles *pausing* of transfers when the application callback returns `CURL_WRITEFUNC_PAUSE`.
|
||||||
|
|
||||||
|
With these writers always in place, libcurl's protocol handlers automatically have these implemented.
|
||||||
|
|
||||||
|
## Enhanced Use
|
||||||
|
|
||||||
|
HTTP is the protocol in curl that makes use of the client writer chain by
|
||||||
|
adding writers to it. When the `libcurl` application set
|
||||||
|
`CURLOPT_ACCEPT_ENCODING` (as `curl` does with `--compressed`), the server is
|
||||||
|
offered an `Accept-Encoding` header with the algorithms supported. The server
|
||||||
|
then may choose to send the response body compressed. For example using `gzip`
|
||||||
|
or `brotli` or even both.
|
||||||
|
|
||||||
|
The server's response may then include a `Content-Encoding` header listing the
|
||||||
|
encodings applied. If supported by `libcurl`, it then decompresses the content
|
||||||
|
before writing it out to the client. How does it do that?
|
||||||
|
|
||||||
|
The HTTP protocol adds client writers in phase `CURL_CW_CONTENT_DECODE` on
|
||||||
|
seeing such a header. For each encoding listed, it adds the corresponding
|
||||||
|
writer. The response from the server is then passed through
|
||||||
|
`Curl_client_write()` to the writers that decode it. If several encodings had
|
||||||
|
been applied the writer chain decodes them in the proper order.
|
||||||
|
|
||||||
|
When the server provides a `Content-Length` header, that value applies to the
|
||||||
|
*compressed* content. Length checks on the response bytes must happen *before*
|
||||||
|
it gets decoded. That is why this check happens in phase `CURL_CW_PROTOCOL`
|
||||||
|
which always is ordered before writers in phase `CURL_CW_CONTENT_DECODE`.
|
||||||
|
|
||||||
|
What else?
|
||||||
|
|
||||||
|
Well, HTTP servers may also apply a `Transfer-Encoding` to the body of a response. The most well-known one is `chunked`, but algorithms like `gzip` and friends could also be applied. The difference from content encodings is that this decoding needs to happen *before* protocol checks, for example on length, are done.
|
||||||
|
|
||||||
|
That is why transfer decoding writers are added for phase `CURL_CW_TRANSFER_DECODE`, which makes their operation happen *before* phase `CURL_CW_PROTOCOL` where length may be checked.
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
By adding the common behavior of all protocols into `Curl_client_write()` we make sure that it applies everywhere. Protocol handlers have less to worry about. Changes to default behavior can be done without affecting handler implementations.
|
||||||
|
|
||||||
|
Having a writer chain as implementation allows protocol handlers with extra needs, like HTTP, to add to this for special behavior. The common way of writing the actual response data stays the same.
|
46
src/dependencies/curl-8.8.0/docs/CMakeLists.txt
Normal file
46
src/dependencies/curl-8.8.0/docs/CMakeLists.txt
Normal file
|
@ -0,0 +1,46 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
#add_subdirectory(examples)
|
||||||
|
if(BUILD_LIBCURL_DOCS)
|
||||||
|
add_subdirectory(libcurl)
|
||||||
|
endif()
|
||||||
|
if(ENABLE_CURL_MANUAL AND BUILD_CURL_EXE)
|
||||||
|
add_subdirectory(cmdline-opts)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(BUILD_MISC_DOCS)
|
||||||
|
foreach(_man_misc IN ITEMS "curl-config" "mk-ca-bundle")
|
||||||
|
set(_man_target "${CURL_BINARY_DIR}/docs/${_man_misc}.1")
|
||||||
|
add_custom_command(OUTPUT "${_man_target}"
|
||||||
|
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||||
|
COMMAND "${PERL_EXECUTABLE}" ${PROJECT_SOURCE_DIR}/scripts/cd2nroff "${_man_misc}.md" > "${_man_target}"
|
||||||
|
DEPENDS "${_man_misc}.md"
|
||||||
|
VERBATIM
|
||||||
|
)
|
||||||
|
add_custom_target("curl-generate-${_man_misc}.1" ALL DEPENDS "${_man_target}")
|
||||||
|
if(NOT CURL_DISABLE_INSTALL)
|
||||||
|
install(FILES "${_man_target}" DESTINATION ${CMAKE_INSTALL_MANDIR}/man1)
|
||||||
|
endif()
|
||||||
|
endforeach()
|
||||||
|
endif()
|
38
src/dependencies/curl-8.8.0/docs/CODE_OF_CONDUCT.md
Normal file
38
src/dependencies/curl-8.8.0/docs/CODE_OF_CONDUCT.md
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
Contributor Code of Conduct
|
||||||
|
===========================
|
||||||
|
|
||||||
|
As contributors and maintainers of this project, we pledge to respect all
|
||||||
|
people who contribute through reporting issues, posting feature requests,
|
||||||
|
updating documentation, submitting pull requests or patches, and other
|
||||||
|
activities.
|
||||||
|
|
||||||
|
We are committed to making participation in this project a harassment-free
|
||||||
|
experience for everyone, regardless of level of experience, gender, gender
|
||||||
|
identity and expression, sexual orientation, disability, personal appearance,
|
||||||
|
body size, race, ethnicity, age, or religion.
|
||||||
|
|
||||||
|
Examples of unacceptable behavior by participants include the use of sexual
|
||||||
|
language or imagery, derogatory comments or personal attacks, trolling, public
|
||||||
|
or private harassment, insults, or other unprofessional conduct.
|
||||||
|
|
||||||
|
Project maintainers have the right and responsibility to remove, edit, or
|
||||||
|
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||||
|
that are not aligned to this Code of Conduct. Project maintainers who do not
|
||||||
|
follow the Code of Conduct may be removed from the project team.
|
||||||
|
|
||||||
|
This code of conduct applies both within project spaces and in public spaces
|
||||||
|
when an individual is representing the project or its community.
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported by opening an issue or contacting one or more of the project
|
||||||
|
maintainers.
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor
|
||||||
|
Covenant](https://contributor-covenant.org/), version 1.1.0, available at
|
||||||
|
[https://contributor-covenant.org/version/1/1/0/](https://contributor-covenant.org/version/1/1/0/)
|
174
src/dependencies/curl-8.8.0/docs/CODE_REVIEW.md
Normal file
174
src/dependencies/curl-8.8.0/docs/CODE_REVIEW.md
Normal file
|
@ -0,0 +1,174 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# How to do code reviews for curl
|
||||||
|
|
||||||
|
Anyone and everyone is encouraged and welcome to review code submissions in
|
||||||
|
curl. This is a guide on what to check for and how to perform a successful
|
||||||
|
code review.
|
||||||
|
|
||||||
|
## All submissions should get reviewed
|
||||||
|
|
||||||
|
All pull requests and patches submitted to the project should be reviewed by
|
||||||
|
at least one experienced curl maintainer before that code is accepted and
|
||||||
|
merged.
|
||||||
|
|
||||||
|
## Let the tools and tests take the first rounds
|
||||||
|
|
||||||
|
On initial pull requests, let the tools and tests do their job first and then
|
||||||
|
start out by helping the submitter understand the test failures and tool
|
||||||
|
alerts.
|
||||||
|
|
||||||
|
## How to provide feedback to author
|
||||||
|
|
||||||
|
Be nice. Ask questions. Provide examples or suggestions of improvements.
|
||||||
|
Assume the best intentions. Remember language barriers.
|
||||||
|
|
||||||
|
All first-time contributors can become regulars. Let's help them go there.
|
||||||
|
|
||||||
|
## Is this a change we want?
|
||||||
|
|
||||||
|
If this is not a change that seems to be aligned with the project's path
|
||||||
|
forward and as such cannot be accepted, inform the author about this sooner
|
||||||
|
rather than later. Do it gently and explain why and possibly what could be
|
||||||
|
done to make it more acceptable.
|
||||||
|
|
||||||
|
## API/ABI stability or changed behavior
|
||||||
|
|
||||||
|
Changing the API and the ABI may be fine in a change but it needs to be done
|
||||||
|
deliberately and carefully. If not, a reviewer must help the author to realize
|
||||||
|
the mistake.
|
||||||
|
|
||||||
|
curl and libcurl are similarly strict on not modifying existing behavior. API
|
||||||
|
and ABI stability is not enough, the behavior should also remain intact as far
|
||||||
|
as possible.
|
||||||
|
|
||||||
|
## Code style
|
||||||
|
|
||||||
|
Most code style nits are detected by checksrc but not all. Only leave remarks
|
||||||
|
on style deviations that checksrc does not catch.
|
||||||
|
|
||||||
|
Minor nits from fresh submitters can also be handled by the maintainer when
|
||||||
|
merging, in case it seems like the submitter is not clear on what to do. We
|
||||||
|
want to make the process fun and exciting for new contributors.
|
||||||
|
|
||||||
|
## Encourage consistency
|
||||||
|
|
||||||
|
Make sure new code is written in a similar style as existing code. Naming,
|
||||||
|
logic, conditions, etc.
|
||||||
|
|
||||||
|
## Are pointers always non-NULL?
|
||||||
|
|
||||||
|
If a function or code relies on pointers being non-NULL, take an extra look if
|
||||||
|
that seems to be a fair assessment.
|
||||||
|
|
||||||
|
## Asserts
|
||||||
|
|
||||||
|
Conditions that should never be false can be verified with `DEBUGASSERT()`
|
||||||
|
calls so that violations are caught in tests and debugging becomes easier, while having no impact
|
||||||
|
on final or release builds.
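
A minimal sketch of what that looks like in practice. `DEBUGASSERT()` is the real internal macro; the function and the `conn->remote_port` field are used here purely for illustration and assume curl's internal headers:

```c
/* Illustrative only: assert invariants that must always hold. In debug
   builds a failed DEBUGASSERT() aborts loudly so tests catch it; in
   release builds the macro compiles to nothing. */
static void use_connection(struct connectdata *conn)
{
  DEBUGASSERT(conn);                  /* callers must never pass NULL */
  DEBUGASSERT(conn->remote_port > 0); /* port must be set by now */

  /* ... continue using the connection ... */
}
```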
|
||||||
|
|
||||||
|
## Memory allocation
|
||||||
|
|
||||||
|
Can the mallocs be avoided? Do not introduce mallocs in any hot paths. If
|
||||||
|
there are (new) mallocs, can they be combined into fewer calls?
|
||||||
|
|
||||||
|
Are all allocations handled in error paths to avoid leaks and crashes?
|
||||||
|
|
||||||
|
## Thread-safety
|
||||||
|
|
||||||
|
We do not like static variables as they break thread-safety and prevent
|
||||||
|
functions from being reentrant.
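
A tiny illustration (not from curl) of why a static variable breaks reentrancy, and the caller-provided alternative that keeps a function thread-safe:

```c
#include <string.h>

/* Problematic: one shared buffer means concurrent or nested callers
   overwrite each other's result, so the function is not reentrant. */
static const char *scheme_name_bad(int secure)
{
  static char buf[8];                 /* shared, static state */
  strcpy(buf, secure ? "https" : "http");
  return buf;
}

/* Preferred: the caller provides the storage. */
static void scheme_name_good(int secure, char *buf, size_t len)
{
  const char *s = secure ? "https" : "http";
  strncpy(buf, s, len - 1);
  buf[len - 1] = '\0';
}
```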
|
||||||
|
|
||||||
|
## Should features be `#ifdef`ed?
|
||||||
|
|
||||||
|
Features and functionality may not be present everywhere and should therefore
|
||||||
|
be `#ifdef`ed. Additionally, some features should be possible to switch on/off
|
||||||
|
in the build.
|
||||||
|
|
||||||
|
Write `#ifdef`s to be as little of a "maze" as possible.
|
||||||
|
|
||||||
|
## Does it look portable enough?
|
||||||
|
|
||||||
|
curl runs "everywhere". Does the code take a reasonable stance and enough
|
||||||
|
precautions to be possible to build and run on most platforms?
|
||||||
|
|
||||||
|
Remember that we live by C89 restrictions.
|
||||||
|
|
||||||
|
## Tests and testability
|
||||||
|
|
||||||
|
New features should be added in conjunction with one or more test cases.
|
||||||
|
Ideally, functions should also be written so that unit tests can be done to
|
||||||
|
test individual functions.
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
New features or changes to existing functionality **must** be accompanied by
|
||||||
|
updated documentation. Submitting that in a separate follow-up pull request is
|
||||||
|
not OK. A code review must also verify that the submitted documentation update
|
||||||
|
matches the code submission.
|
||||||
|
|
||||||
|
English is not everyone's first language, be mindful of this and help the
|
||||||
|
submitter improve the text if it needs a rewrite to read better.
|
||||||
|
|
||||||
|
## Code should not be hard to understand
|
||||||
|
|
||||||
|
Source code should be written to maximize readability and be easy to
|
||||||
|
understand.
|
||||||
|
|
||||||
|
## Functions should not be large
|
||||||
|
|
||||||
|
A single function should never be large as that makes it hard to follow and
|
||||||
|
understand all the exit points and state changes. Some existing functions in
|
||||||
|
curl certainly violate this ground rule but when reviewing new code we should
|
||||||
|
propose splitting into smaller functions.
|
||||||
|
|
||||||
|
## Duplication is evil
|
||||||
|
|
||||||
|
Anything that looks like duplicated code is a red flag. Anything that seems to
|
||||||
|
introduce code that we *should* already have or provide needs a closer check.
|
||||||
|
|
||||||
|
## Sensitive data
|
||||||
|
|
||||||
|
When credentials are involved, take an extra look at what happens with this
|
||||||
|
data. Where it comes from and where it goes.
|
||||||
|
|
||||||
|
## Variable types differ
|
||||||
|
|
||||||
|
`size_t` is not a fixed size. `time_t` can be signed or unsigned and have
|
||||||
|
different sizes. Relying on variable sizes is a red flag.
|
||||||
|
|
||||||
|
Also remember that endianness and >= 32 bit accesses to unaligned addresses
|
||||||
|
are problematic areas.
|
||||||
|
|
||||||
|
## Integer overflows
|
||||||
|
|
||||||
|
Be careful about integer overflows. Some variable types can be either 32 bit
|
||||||
|
or 64 bit. Integer overflows must be detected and acted on *before* they
|
||||||
|
happen.
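
A small generic-C illustration (not an excerpt from curl) of detecting overflow *before* the arithmetic happens:

```c
#include <stddef.h>

/* Returns 1 and stores nmemb * size in *result when the product fits in a
   size_t; returns 0 and leaves *result untouched when it would overflow.
   The check happens before the multiplication, never after. */
static int safe_mul(size_t nmemb, size_t size, size_t *result)
{
  if(size && nmemb > (size_t)-1 / size)
    return 0;                        /* would overflow */
  *result = nmemb * size;
  return 1;
}
```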
|
||||||
|
|
||||||
|
## Dangerous use of functions
|
||||||
|
|
||||||
|
Maybe use of `realloc()` should rather use the dynbuf functions?
|
||||||
|
|
||||||
|
Do not allow new code that grows buffers without using dynbuf.
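
For orientation, growing a buffer through dynbuf might look roughly like the sketch below. The `Curl_dyn_*` names follow curl's internal `lib/dynbuf.h`; treat this as a sketch rather than an excerpt, and `MAX_EXPECTED_SIZE` is a placeholder:

```c
#include "dynbuf.h"        /* curl's internal dynamic buffer API */

static CURLcode collect(struct dynbuf *buf, const char *chunk, size_t len)
{
  /* dynbuf reallocates internally and enforces the maximum size given at
     init time, so no hand-rolled realloc() arithmetic is needed */
  return Curl_dyn_addn(buf, chunk, len);  /* fails on OOM or size limit */
}

/* caller side (sketch):
     struct dynbuf buf;
     Curl_dyn_init(&buf, MAX_EXPECTED_SIZE);
     ... collect(&buf, data, datalen) ...
     use Curl_dyn_ptr(&buf) and Curl_dyn_len(&buf), then Curl_dyn_free(&buf);
*/
```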
|
||||||
|
|
||||||
|
C functions that rely on a terminating zero must only be used on data
|
||||||
|
that really does have a terminating zero.
|
||||||
|
|
||||||
|
## Dangerous "data styles"
|
||||||
|
|
||||||
|
Take extra precautions and verify that memory buffers that need a terminating
|
||||||
|
zero always have exactly that. Buffers *without* a null-terminator must not be
|
||||||
|
used as input to string functions.
|
||||||
|
|
||||||
|
# Commit messages
|
||||||
|
|
||||||
|
Tightly coupled with a code review is making sure that the commit message is
|
||||||
|
good. It is the responsibility of the person who merges the code to make sure
|
||||||
|
that the commit message follows our standard (detailed in the
|
||||||
|
[CONTRIBUTE](CONTRIBUTE.md) document). This includes making sure the PR
|
||||||
|
identifies related issues and giving credit to reporters and helpers.
|
316
src/dependencies/curl-8.8.0/docs/CODE_STYLE.md
Normal file
316
src/dependencies/curl-8.8.0/docs/CODE_STYLE.md
Normal file
|
@@ -0,0 +1,316 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curl C code style
|
||||||
|
|
||||||
|
Source code that has a common style is easier to read than code that uses
|
||||||
|
different styles in different places. It helps make the code feel like one
|
||||||
|
single code base. Easy-to-read is an important property of code and helps
|
||||||
|
make it easier to review when new things are added and it helps when debugging
|
||||||
|
code when developers are trying to figure out why things go wrong. A unified
|
||||||
|
style is more important than individual contributors having their own personal
|
||||||
|
tastes satisfied.
|
||||||
|
|
||||||
|
Our C code has a few style rules. Most of them are verified and upheld by the
|
||||||
|
`scripts/checksrc.pl` script. Invoked with `make checksrc` or even by default
|
||||||
|
by the build system when built after `./configure --enable-debug` has been
|
||||||
|
used.
|
||||||
|
|
||||||
|
It is normally not a problem for anyone to follow the guidelines, as you just
|
||||||
|
need to copy the style already used in the source code and there are no
|
||||||
|
particularly unusual rules in our set of rules.
|
||||||
|
|
||||||
|
We also work hard on writing code that is warning-free on all the major
|
||||||
|
platforms and in general on as many platforms as possible. Code that obviously
|
||||||
|
causes warnings is not accepted as-is.
|
||||||
|
|
||||||
|
## Naming
|
||||||
|
|
||||||
|
Try using a non-confusing naming scheme for your new functions and variable
|
||||||
|
names. It does not necessarily have to mean that you should use the same as in
|
||||||
|
other places of the code, just that the names should be logical,
|
||||||
|
understandable and be named according to what they are used for. File-local
|
||||||
|
functions should be made static. We like lower case names.
|
||||||
|
|
||||||
|
See the [INTERNALS](https://curl.se/dev/internals.html#symbols) document on
|
||||||
|
how we name non-exported library-global symbols.
|
||||||
|
|
||||||
|
## Indenting
|
||||||
|
|
||||||
|
We use only spaces for indentation, never TABs. We use two spaces for each new
|
||||||
|
open brace.
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(something_is_true) {
|
||||||
|
while(second_statement == fine) {
|
||||||
|
moo();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Comments
|
||||||
|
|
||||||
|
Since we write C89 code, **//** comments are not allowed. They were not
|
||||||
|
introduced in the C standard until C99. We use only __/* comments */__.
|
||||||
|
|
||||||
|
```c
|
||||||
|
/* this is a comment */
|
||||||
|
```
|
||||||
|
|
||||||
|
## Long lines
|
||||||
|
|
||||||
|
Source code in curl may never be wider than 79 columns and there are two
|
||||||
|
reasons for maintaining this even in the modern era of large and high
|
||||||
|
resolution screens:
|
||||||
|
|
||||||
|
1. Narrower columns are easier to read than wide ones. There is a reason
|
||||||
|
newspapers have used columns for decades or centuries.
|
||||||
|
|
||||||
|
2. Narrower columns allow developers to easier show multiple pieces of code
|
||||||
|
next to each other in different windows. It allows two or three source
|
||||||
|
code windows next to each other on the same screen - as well as multiple
|
||||||
|
terminal and debugging windows.
|
||||||
|
|
||||||
|
## Braces
|
||||||
|
|
||||||
|
In if/while/do/for expressions, we write the open brace on the same line as
|
||||||
|
the keyword and we then set the closing brace on the same indentation level as
|
||||||
|
the initial keyword. Like this:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(age < 40) {
|
||||||
|
/* clearly a youngster */
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You may omit the braces if they would contain only a one-line statement:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(!x)
|
||||||
|
continue;
|
||||||
|
```
|
||||||
|
|
||||||
|
For functions the opening brace should be on a separate line:
|
||||||
|
|
||||||
|
```c
|
||||||
|
int main(int argc, char **argv)
|
||||||
|
{
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 'else' on the following line
|
||||||
|
|
||||||
|
When adding an **else** clause to a conditional expression using braces, we
|
||||||
|
add it on a new line after the closing brace. Like this:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(age < 40) {
|
||||||
|
/* clearly a youngster */
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
/* probably grumpy */
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## No space before parentheses
|
||||||
|
|
||||||
|
When writing expressions using if/while/do/for, there shall be no space
|
||||||
|
between the keyword and the open parenthesis. Like this:
|
||||||
|
|
||||||
|
```c
|
||||||
|
while(1) {
|
||||||
|
/* loop forever */
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Use boolean conditions
|
||||||
|
|
||||||
|
Rather than test a conditional value such as a bool against TRUE or FALSE, a
|
||||||
|
pointer against NULL or != NULL and an int against zero or not zero in
|
||||||
|
if/while conditions we prefer:
|
||||||
|
|
||||||
|
```c
|
||||||
|
result = do_something();
|
||||||
|
if(!result) {
|
||||||
|
/* something went wrong */
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## No assignments in conditions
|
||||||
|
|
||||||
|
To increase readability and reduce complexity of conditionals, we avoid
|
||||||
|
assigning variables within if/while conditions. We frown upon this style:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if((ptr = malloc(100)) == NULL)
|
||||||
|
return NULL;
|
||||||
|
```
|
||||||
|
|
||||||
|
and instead we encourage the above version to be spelled out more clearly:
|
||||||
|
|
||||||
|
```c
|
||||||
|
ptr = malloc(100);
|
||||||
|
if(!ptr)
|
||||||
|
return NULL;
|
||||||
|
```
|
||||||
|
|
||||||
|
## New block on a new line
|
||||||
|
|
||||||
|
We never write multiple statements on the same source line, even for short
|
||||||
|
if() conditions.
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(a)
|
||||||
|
return TRUE;
|
||||||
|
else if(b)
|
||||||
|
return FALSE;
|
||||||
|
```
|
||||||
|
|
||||||
|
and NEVER:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(a) return TRUE;
|
||||||
|
else if(b) return FALSE;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Space around operators
|
||||||
|
|
||||||
|
Please use spaces on both sides of operators in C expressions. Postfix **(),
|
||||||
|
[], ->, ., ++, --** and unary **+, -, !, ~, &** operators are excluded; they should
|
||||||
|
have no space.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
```c
|
||||||
|
bla = func();
|
||||||
|
who = name[0];
|
||||||
|
age += 1;
|
||||||
|
true = !false;
|
||||||
|
size += -2 + 3 * (a + b);
|
||||||
|
ptr->member = a++;
|
||||||
|
struct.field = b--;
|
||||||
|
ptr = &address;
|
||||||
|
contents = *pointer;
|
||||||
|
complement = ~bits;
|
||||||
|
empty = (!*string) ? TRUE : FALSE;
|
||||||
|
```
|
||||||
|
|
||||||
|
## No parentheses for return values
|
||||||
|
|
||||||
|
We use the 'return' statement without extra parentheses around the value:
|
||||||
|
|
||||||
|
```c
|
||||||
|
int works(void)
|
||||||
|
{
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Parentheses for sizeof arguments
|
||||||
|
|
||||||
|
When using the sizeof operator in code, we prefer it to be written with
|
||||||
|
parentheses around its argument:
|
||||||
|
|
||||||
|
```c
|
||||||
|
int size = sizeof(int);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Column alignment
|
||||||
|
|
||||||
|
Some statements cannot be completed on a single line because the line would be
|
||||||
|
too long, the statement too hard to read, or due to other style guidelines
|
||||||
|
above. In such a case the statement spans multiple lines.
|
||||||
|
|
||||||
|
If a continuation line is part of an expression or sub-expression then you
|
||||||
|
should align on the appropriate column so that it is easy to tell what part of
|
||||||
|
the statement it is. Operators should not start continuation lines. In other
|
||||||
|
cases follow the 2-space indent guideline. Here are some examples from
|
||||||
|
libcurl:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(Curl_pipeline_wanted(handle->multi, CURLPIPE_HTTP1) &&
|
||||||
|
(handle->set.httpversion != CURL_HTTP_VERSION_1_0) &&
|
||||||
|
(handle->set.httpreq == HTTPREQ_GET ||
|
||||||
|
handle->set.httpreq == HTTPREQ_HEAD))
|
||||||
|
/* did not ask for HTTP/1.0 and a GET or HEAD */
|
||||||
|
return TRUE;
|
||||||
|
```
|
||||||
|
|
||||||
|
If no parenthesis, use the default indent:
|
||||||
|
|
||||||
|
```c
|
||||||
|
data->set.http_disable_hostname_check_before_authentication =
|
||||||
|
(0 != va_arg(param, long)) ? TRUE : FALSE;
|
||||||
|
```
|
||||||
|
|
||||||
|
A function invocation with an open parenthesis:
|
||||||
|
|
||||||
|
```c
|
||||||
|
if(option) {
|
||||||
|
result = parse_login_details(option, strlen(option),
|
||||||
|
(userp ? &user : NULL),
|
||||||
|
(passwdp ? &passwd : NULL),
|
||||||
|
NULL);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Align with the "current open" parenthesis:
|
||||||
|
|
||||||
|
```c
|
||||||
|
DEBUGF(infof(data, "Curl_pp_readresp_ %d bytes of trailing "
|
||||||
|
"server response left\n",
|
||||||
|
(int)clipamount));
|
||||||
|
```
|
||||||
|
|
||||||
|
## Platform dependent code
|
||||||
|
|
||||||
|
Use **#ifdef HAVE_FEATURE** to do conditional code. We avoid checking for
|
||||||
|
particular operating systems or hardware in the #ifdef lines. The HAVE_FEATURE
|
||||||
|
shall be generated by the configure script for unix-like systems and they are
|
||||||
|
hard-coded in the `config-[system].h` files for the others.
|
||||||
|
|
||||||
|
We also encourage use of macros/functions that possibly are empty or defined
|
||||||
|
to constants when libcurl is built without that feature, to make the code
|
||||||
|
seamless. Like this example where the **magic()** function works differently
|
||||||
|
depending on a build-time conditional:
|
||||||
|
|
||||||
|
```c
|
||||||
|
#ifdef HAVE_MAGIC
|
||||||
|
int magic(int a)
|
||||||
|
{
|
||||||
|
return a + 2;
|
||||||
|
}
|
||||||
|
#else
|
||||||
|
#define magic(x) 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
int content = magic(3);
|
||||||
|
```
|
||||||
|
|
||||||
|
## No typedefed structs
|
||||||
|
|
||||||
|
Use structs by all means, but do not typedef them. Use the `struct name` way
|
||||||
|
of identifying them:
|
||||||
|
|
||||||
|
```c
|
||||||
|
struct something {
|
||||||
|
void *valid;
|
||||||
|
size_t way_to_write;
|
||||||
|
};
|
||||||
|
struct something instance;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Not okay**:
|
||||||
|
|
||||||
|
```c
|
||||||
|
typedef struct {
|
||||||
|
void *wrong;
|
||||||
|
size_t way_to_write;
|
||||||
|
} something;
|
||||||
|
something instance;
|
||||||
|
```
|
308
src/dependencies/curl-8.8.0/docs/CONNECTION-FILTERS.md
Normal file
308
src/dependencies/curl-8.8.0/docs/CONNECTION-FILTERS.md
Normal file
|
@@ -0,0 +1,308 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curl connection filters
|
||||||
|
|
||||||
|
Connection filters are a design in the internals of curl, not visible in its
|
||||||
|
public API. They were added in curl v7.87.0. This document describes the
|
||||||
|
concepts, its high level implementation and the motivations.
|
||||||
|
|
||||||
|
## Filters
|
||||||
|
|
||||||
|
A "connection filter" is a piece of code that is responsible for handling a
|
||||||
|
range of operations of curl's connections: reading, writing, waiting on
|
||||||
|
external events, connecting and closing down - to name the most important
|
||||||
|
ones.
|
||||||
|
|
||||||
|
The most important feature of connection filters is that they can be stacked on
|
||||||
|
top of each other (or "chained" if you prefer that metaphor). In the common
|
||||||
|
scenario that you want to retrieve a `https:` URL with curl, you need 2 basic
|
||||||
|
things to send the request and get the response: a TCP connection, represented
|
||||||
|
by a `socket`, and an SSL instance to encrypt and decrypt over that socket. You write
|
||||||
|
your request to the SSL instance, which encrypts and writes that data to the
|
||||||
|
socket, which then sends the bytes over the network.
|
||||||
|
|
||||||
|
With connection filters, curl's internal setup looks something like this (cf
|
||||||
|
for connection filter):
|
||||||
|
|
||||||
|
```
|
||||||
|
Curl_easy *data connectdata *conn cf-ssl cf-socket
|
||||||
|
+----------------+ +-----------------+ +-------+ +--------+
|
||||||
|
|https://curl.se/|----> | properties |----> | keys |---> | socket |--> OS --> network
|
||||||
|
+----------------+ +-----------------+ +-------+ +--------+
|
||||||
|
|
||||||
|
Curl_write(data, buffer)
|
||||||
|
--> Curl_cfilter_write(data, data->conn, buffer)
|
||||||
|
---> conn->filter->write(conn->filter, data, buffer)
|
||||||
|
```
|
||||||
|
|
||||||
|
While connection filters all do different things, they look the same from the
|
||||||
|
"outside". The code in `data` and `conn` does not really know **which**
|
||||||
|
filters are installed. `conn` just writes into the first filter, whatever that
|
||||||
|
is.
|
||||||
|
|
||||||
|
Same is true for filters. Each filter has a pointer to the `next` filter. When
|
||||||
|
SSL has encrypted the data, it does not write to a socket, it writes to the
|
||||||
|
next filter. If that is indeed a socket, or a file, or an HTTP/2 connection is
|
||||||
|
of no concern to the SSL filter.
|
||||||
|
|
||||||
|
This allows stacking, as in:
|
||||||
|
|
||||||
|
```
|
||||||
|
Direct:
|
||||||
|
http://localhost/ conn -> cf-socket
|
||||||
|
https://curl.se/ conn -> cf-ssl -> cf-socket
|
||||||
|
Via http proxy tunnel:
|
||||||
|
http://localhost/ conn -> cf-http-proxy -> cf-socket
|
||||||
|
https://curl.se/ conn -> cf-ssl -> cf-http-proxy -> cf-socket
|
||||||
|
Via https proxy tunnel:
|
||||||
|
http://localhost/ conn -> cf-http-proxy -> cf-ssl -> cf-socket
|
||||||
|
https://curl.se/ conn -> cf-ssl -> cf-http-proxy -> cf-ssl -> cf-socket
|
||||||
|
Via http proxy tunnel via SOCKS proxy:
|
||||||
|
http://localhost/ conn -> cf-http-proxy -> cf-socks -> cf-socket
|
||||||
|
```
|
||||||
|
|
||||||
|
### Connecting/Closing
|
||||||
|
|
||||||
|
Before `Curl_easy` can send the request, the connection needs to be
|
||||||
|
established. This means that all connection filters have done whatever they
|
||||||
|
need to do: waiting for the socket to be connected, doing the TLS handshake,
|
||||||
|
performing the HTTP tunnel request, etc. This has to be done in reverse order:
|
||||||
|
the last filter has to do its connect first, then the one above can start,
|
||||||
|
etc.
|
||||||
|
|
||||||
|
Each filter does in principle the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
static CURLcode
|
||||||
|
myfilter_cf_connect(struct Curl_cfilter *cf,
|
||||||
|
struct Curl_easy *data,
|
||||||
|
bool blocking, bool *done)
|
||||||
|
{
|
||||||
|
CURLcode result;
|
||||||
|
|
||||||
|
if(cf->connected) { /* we and all below are done */
|
||||||
|
*done = TRUE;
|
||||||
|
return CURLE_OK;
|
||||||
|
}
|
||||||
|
/* Let the filters below connect */
|
||||||
|
result = cf->next->cft->connect(cf->next, data, blocking, done);
|
||||||
|
if(result || !*done)
|
||||||
|
return result; /* below errored/not finished yet */
|
||||||
|
|
||||||
|
/* MYFILTER CONNECT THINGS */ /* below connected, do our thing */
|
||||||
|
*done = cf->connected = TRUE; /* done, remember, return */
|
||||||
|
return CURLE_OK;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Closing a connection then works similarly. The `conn` tells the first filter to
|
||||||
|
close. Contrary to connecting, the filter does its own things first, before
|
||||||
|
telling the next filter to close.
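
A matching close could look like the sketch below; it mirrors the connect example above but works top-down. The name of the close callback (`do_close`) is an assumption about the internal `Curl_cftype` struct and may differ:

```
static void myfilter_cf_close(struct Curl_cfilter *cf, struct Curl_easy *data)
{
  /* MYFILTER CLEANUP THINGS first: flush state, free filter-local data */

  cf->connected = FALSE;

  /* then let the filters below close; the last one releases the socket */
  if(cf->next)
    cf->next->cft->do_close(cf->next, data);   /* assumed member name */
}
```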
|
||||||
|
|
||||||
|
### Efficiency
|
||||||
|
|
||||||
|
There are two things curl is concerned about: efficient memory use and fast
|
||||||
|
transfers.
|
||||||
|
|
||||||
|
The memory footprint of a filter is relatively small:
|
||||||
|
|
||||||
|
```
|
||||||
|
struct Curl_cfilter {
|
||||||
|
const struct Curl_cftype *cft; /* the type providing implementation */
|
||||||
|
struct Curl_cfilter *next; /* next filter in chain */
|
||||||
|
void *ctx; /* filter type specific settings */
|
||||||
|
struct connectdata *conn; /* the connection this filter belongs to */
|
||||||
|
int sockindex; /* TODO: would like to get rid of this */
|
||||||
|
BIT(connected); /* != 0 iff this filter is connected */
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
The filter type `cft` is a singleton, one static struct for each type of
|
||||||
|
filter. The `ctx` is where a filter holds its specific data. That varies by
|
||||||
|
filter type. An http-proxy filter keeps the ongoing state of the CONNECT here,
|
||||||
|
freeing it after it has been established. The SSL filter keeps the `SSL*` (if
|
||||||
|
OpenSSL is used) here until the connection is closed. So, this varies.
|
||||||
|
|
||||||
|
`conn` is a reference to the connection this filter belongs to, so nothing
|
||||||
|
extra besides the pointer itself.
|
||||||
|
|
||||||
|
Several things that before were kept in `struct connectdata` now go into
|
||||||
|
the `filter->ctx` *when needed*. So, the memory footprint for connections that
|
||||||
|
do *not* use an http proxy, or socks, or https is lower.
|
||||||
|
|
||||||
|
As to transfer efficiency, writing and reading through a filter comes at near
|
||||||
|
zero cost *if the filter does not transform the data*. An http proxy or socks
|
||||||
|
filter, once it is connected, just passes the calls through. Those filters
|
||||||
|
implementations look like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
ssize_t Curl_cf_def_send(struct Curl_cfilter *cf, struct Curl_easy *data,
|
||||||
|
const void *buf, size_t len, CURLcode *err)
|
||||||
|
{
|
||||||
|
return cf->next->cft->do_send(cf->next, data, buf, len, err);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
The `recv` implementation is equivalent.
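
The read side, sketched by analogy (the `do_recv` member name is assumed to mirror `do_send` above):

```
ssize_t Curl_cf_def_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
                         char *buf, size_t len, CURLcode *err)
{
  return cf->next->cft->do_recv(cf->next, data, buf, len, err);
}
```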
|
||||||
|
|
||||||
|
## Filter Types
|
||||||
|
|
||||||
|
The currently existing filter types (curl 8.5.0) are:
|
||||||
|
|
||||||
|
* `TCP`, `UDP`, `UNIX`: filters that operate on a socket, providing raw I/O.
|
||||||
|
* `SOCKET-ACCEPT`: special TCP socket that has a socket that has been
|
||||||
|
`accept()`ed in a `listen()`
|
||||||
|
* `SSL`: filter that applies TLS en-/decryption and handshake. Manages the
|
||||||
|
underlying TLS backend implementation.
|
||||||
|
* `HTTP-PROXY`, `H1-PROXY`, `H2-PROXY`: the first manages the connection to an
|
||||||
|
HTTP proxy server and uses the other depending on which ALPN protocol has
|
||||||
|
been negotiated.
|
||||||
|
* `SOCKS-PROXY`: filter for the various SOCKS proxy protocol variations
|
||||||
|
* `HAPROXY`: filter for the protocol of the same name, providing client IP
|
||||||
|
information to a server.
|
||||||
|
* `HTTP/2`: filter for handling multiplexed transfers over an HTTP/2
|
||||||
|
connection
|
||||||
|
* `HTTP/3`: filter for handling multiplexed transfers over an HTTP/3+QUIC
|
||||||
|
connection
|
||||||
|
* `HAPPY-EYEBALLS`: meta filter that implements IPv4/IPv6 "happy eyeballing".
|
||||||
|
It creates up to 2 sub-filters that race each other for a connection.
|
||||||
|
* `SETUP`: meta filter that manages the creation of sub-filter chains for a
|
||||||
|
specific transport (e.g. TCP or QUIC).
|
||||||
|
* `HTTPS-CONNECT`: meta filter that races a TCP+TLS and a QUIC connection
|
||||||
|
against each other to determine if HTTP/1.1, HTTP/2 or HTTP/3 shall be used
|
||||||
|
for a transfer.
|
||||||
|
|
||||||
|
Meta filters combine other filters for a specific purpose, mostly during
|
||||||
|
connection establishment. Other filters like `TCP`, `UDP` and `UNIX` are only
|
||||||
|
to be found at the end of filter chains. SSL filters provide encryption, of
|
||||||
|
course. Protocol filters change the bytes sent and received.
|
||||||
|
|
||||||
|
## Filter Flags
|
||||||
|
|
||||||
|
Filter types carry flags that inform what they do. These are (for now):
|
||||||
|
|
||||||
|
* `CF_TYPE_IP_CONNECT`: this filter type talks directly to a server. This does
|
||||||
|
not have to be the server the transfer wants to talk to. For example when a
|
||||||
|
proxy server is used.
|
||||||
|
* `CF_TYPE_SSL`: this filter type provides encryption.
|
||||||
|
* `CF_TYPE_MULTIPLEX`: this filter type can manage multiple transfers in parallel.
|
||||||
|
|
||||||
|
Filter types can combine these flags. For example, the HTTP/3 filter types
|
||||||
|
have `CF_TYPE_IP_CONNECT`, `CF_TYPE_SSL` and `CF_TYPE_MULTIPLEX` set.
|
||||||
|
|
||||||
|
Flags are useful to extrapolate properties of a connection. To check if a
|
||||||
|
connection is encrypted, libcurl inspects the filter chain in place, top down,
|
||||||
|
for `CF_TYPE_SSL`. If it finds `CF_TYPE_IP_CONNECT` before any `CF_TYPE_SSL`,
|
||||||
|
the connection is not encrypted.
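
Expressed as code, that check could look like this sketch. It is illustrative rather than the actual libcurl helper, and it assumes the type flags are reachable as `cf->cft->flags`:

```
static bool chain_is_encrypted(struct Curl_cfilter *cf)
{
  for(; cf; cf = cf->next) {
    if(cf->cft->flags & CF_TYPE_SSL)
      return TRUE;      /* encryption sits above any plain connect */
    if(cf->cft->flags & CF_TYPE_IP_CONNECT)
      return FALSE;     /* plain connect reached before any SSL */
  }
  return FALSE;
}
```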
|
||||||
|
|
||||||
|
For example, `conn1` is for a `http:` request using a tunnel through an HTTP/2
|
||||||
|
`https:` proxy. `conn2` is a `https:` HTTP/2 connection to the same proxy.
|
||||||
|
`conn3` uses HTTP/3 without proxy. The filter chains would look like this
|
||||||
|
(simplified):
|
||||||
|
|
||||||
|
```
|
||||||
|
conn1 --> `HTTP-PROXY` --> `H2-PROXY` --> `SSL` --> `TCP`
|
||||||
|
flags: `IP_CONNECT` `SSL` `IP_CONNECT`
|
||||||
|
|
||||||
|
conn2 --> `HTTP/2` --> `SSL` --> `HTTP-PROXY` --> `H2-PROXY` --> `SSL` --> `TCP`
|
||||||
|
flags: `SSL` `IP_CONNECT` `SSL` `IP_CONNECT`
|
||||||
|
|
||||||
|
conn3 --> `HTTP/3`
|
||||||
|
flags: `SSL|IP_CONNECT`
|
||||||
|
```
|
||||||
|
|
||||||
|
Inspecting the filter chains, `conn1` is seen as unencrypted, since it
|
||||||
|
contains an `IP_CONNECT` filter before any `SSL`. `conn2` is clearly encrypted
|
||||||
|
as an `SSL` flagged filter is seen first. `conn3` is also encrypted as the
|
||||||
|
`SSL` flag is checked before the presence of `IP_CONNECT`.
|
||||||
|
|
||||||
|
Similar checks can determine if a connection is multiplexed or not.
|
||||||
|
|
||||||
|
## Filter Tracing
|
||||||
|
|
||||||
|
Filters may make use of special trace macros like `CURL_TRC_CF(data, cf, msg,
|
||||||
|
...)`. With `data` being the transfer and `cf` being the filter instance.
|
||||||
|
These traces are normally not active and their execution is guarded so that
|
||||||
|
they are cheap to ignore.
|
||||||
|
|
||||||
|
Users of `curl` may activate them by adding the name of the filter type to the
|
||||||
|
`--trace-config` argument. For example, in order to get more detailed tracing
|
||||||
|
of an HTTP/2 request, invoke curl with:
|
||||||
|
|
||||||
|
```
|
||||||
|
> curl -v --trace-config ids,time,http/2 https://curl.se
|
||||||
|
```
|
||||||
|
|
||||||
|
Which gives you trace output with time information, transfer+connection ids
|
||||||
|
and details from the `HTTP/2` filter. Filter type names in the trace config
|
||||||
|
are case insensitive. You may use `all` to enable tracing for all filter
|
||||||
|
types. When using `libcurl` you may call `curl_global_trace(config_string)` at
|
||||||
|
the start of your application to enable filter details.
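
A minimal libcurl program that enables the same trace details as the command line above; `curl_global_trace()` is the public call mentioned, the rest is an ordinary easy-interface transfer:

```
#include <curl/curl.h>

int main(void)
{
  CURL *h;

  curl_global_trace("ids,time,http/2");  /* same string as --trace-config */
  curl_global_init(CURL_GLOBAL_DEFAULT);

  h = curl_easy_init();
  if(h) {
    curl_easy_setopt(h, CURLOPT_URL, "https://curl.se/");
    curl_easy_setopt(h, CURLOPT_VERBOSE, 1L);  /* trace output on stderr */
    curl_easy_perform(h);
    curl_easy_cleanup(h);
  }
  curl_global_cleanup();
  return 0;
}
```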
|
||||||
|
|
||||||
|
## Meta Filters
|
||||||
|
|
||||||
|
Meta filters is a catch-all name for filter types that do not change the
|
||||||
|
transfer data in any way but provide other important services to curl. In
|
||||||
|
general, it is possible to do all sorts of silly things with them. One of the
|
||||||
|
commonly used, important things is "eyeballing".
|
||||||
|
|
||||||
|
The `HAPPY-EYEBALLS` filter is involved in the connect phase. Its job is to
|
||||||
|
try the various IPv4 and IPv6 addresses that are known for a server. If only
|
||||||
|
one address family is known (or configured), it tries the addresses one after
|
||||||
|
the other with timeouts calculated from the amount of addresses and the
|
||||||
|
overall connect timeout.
|
||||||
|
|
||||||
|
When more than one address family is to be tried, it splits the address list
|
||||||
|
into IPv4 and IPv6 and makes parallel attempts. The connection filter chain
|
||||||
|
looks like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
* create connection for http://curl.se
|
||||||
|
conn[curl.se] --> SETUP[TCP] --> HAPPY-EYEBALLS --> NULL
|
||||||
|
* start connect
|
||||||
|
conn[curl.se] --> SETUP[TCP] --> HAPPY-EYEBALLS --> NULL
|
||||||
|
- ballerv4 --> TCP[151.101.1.91]:443
|
||||||
|
- ballerv6 --> TCP[2a04:4e42:c00::347]:443
|
||||||
|
* v6 answers, connected
|
||||||
|
conn[curl.se] --> SETUP[TCP] --> HAPPY-EYEBALLS --> TCP[2a04:4e42:c00::347]:443
|
||||||
|
* transfer
|
||||||
|
```
|
||||||
|
|
||||||
|
The modular design of connection filters, and the fact that they can be plugged into each other, is used to control the parallel attempts. When a `TCP` filter does not connect (in time), it is torn down and another one is created for the next address. This keeps the `TCP` filter simple.
|
||||||
|
|
||||||
|
The `HAPPY-EYEBALLS` filter, on the other hand, stays focused on its side of the problem. We can also use it to make other types of connections simply by giving it another filter type to try, for example to get happy eyeballing for QUIC:
|
||||||
|
|
||||||
|
```
|
||||||
|
* create connection for --http3-only https://curl.se
|
||||||
|
conn[curl.se] --> SETUP[QUIC] --> HAPPY-EYEBALLS --> NULL
|
||||||
|
* start connect
|
||||||
|
conn[curl.se] --> SETUP[QUIC] --> HAPPY-EYEBALLS --> NULL
|
||||||
|
- ballerv4 --> HTTP/3[151.101.1.91]:443
|
||||||
|
- ballerv6 --> HTTP/3[2a04:4e42:c00::347]:443
|
||||||
|
* v6 answers, connected
|
||||||
|
conn[curl.se] --> SETUP[QUIC] --> HAPPY-EYEBALLS --> HTTP/3[2a04:4e42:c00::347]:443
|
||||||
|
* transfer
|
||||||
|
```
|
||||||
|
|
||||||
|
When we plug these two variants together, we get the `HTTPS-CONNECT` filter
|
||||||
|
type that is used for `--http3` when **both** HTTP/3 and HTTP/2 or HTTP/1.1
|
||||||
|
shall be attempted:
|
||||||
|
|
||||||
|
```
|
||||||
|
* create connection for --http3 https://curl.se
|
||||||
|
conn[curl.se] --> HTTPS-CONNECT --> NULL
|
||||||
|
* start connect
|
||||||
|
conn[curl.se] --> HTTPS-CONNECT --> NULL
|
||||||
|
- SETUP[QUIC] --> HAPPY-EYEBALLS --> NULL
|
||||||
|
- ballerv4 --> HTTP/3[151.101.1.91]:443
|
||||||
|
- ballerv6 --> HTTP/3[2a04:4e42:c00::347]:443
|
||||||
|
- SETUP[TCP] --> HAPPY-EYEBALLS --> NULL
|
||||||
|
- ballerv4 --> TCP[151.101.1.91]:443
|
||||||
|
- ballerv6 --> TCP[2a04:4e42:c00::347]:443
|
||||||
|
* v4 QUIC answers, connected
|
||||||
|
conn[curl.se] --> HTTPS-CONNECT --> SETUP[QUIC] --> HAPPY-EYEBALLS --> HTTP/3[151.101.1.91]:443
|
||||||
|
* transfer
|
||||||
|
```
|
323
src/dependencies/curl-8.8.0/docs/CONTRIBUTE.md
Normal file
323
src/dependencies/curl-8.8.0/docs/CONTRIBUTE.md
Normal file
|
@@ -0,0 +1,323 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Contributing to the curl project
|
||||||
|
|
||||||
|
This document is intended to offer guidelines on how to best contribute to the
|
||||||
|
curl project. This concerns new features as well as corrections to existing
|
||||||
|
flaws or bugs.
|
||||||
|
|
||||||
|
## Join the Community
|
||||||
|
|
||||||
|
Skip over to [https://curl.se/mail/](https://curl.se/mail/) and join
|
||||||
|
the appropriate mailing list(s). Read up on details before you post
|
||||||
|
questions. Read this file before you start sending patches. We prefer
|
||||||
|
questions sent to and discussions being held on the mailing list(s), not sent
|
||||||
|
to individuals.
|
||||||
|
|
||||||
|
Before posting to one of the curl mailing lists, please read up on the
|
||||||
|
[mailing list etiquette](https://curl.se/mail/etiquette.html).
|
||||||
|
|
||||||
|
We also hang out on IRC in #curl on libera.chat
|
||||||
|
|
||||||
|
If you are at all interested in the code side of things, consider clicking
|
||||||
|
'watch' on the [curl repo on GitHub](https://github.com/curl/curl) to be
|
||||||
|
notified of pull requests and new issues posted there.
|
||||||
|
|
||||||
|
## License and copyright
|
||||||
|
|
||||||
|
When contributing with code, you agree to put your changes and new code under
|
||||||
|
the same license curl and libcurl is already using unless stated and agreed
|
||||||
|
otherwise.
|
||||||
|
|
||||||
|
If you add a larger piece of code, you can opt to make that file or set of
|
||||||
|
files to use a different license as long as they do not enforce any changes to
|
||||||
|
the rest of the package and they make sense. Such "separate parts" can not be
|
||||||
|
GPL licensed (as we do not want copyleft to affect users of libcurl) but they
|
||||||
|
must use "GPL compatible" licenses (as we want to allow users to use libcurl
|
||||||
|
properly in GPL licensed environments).
|
||||||
|
|
||||||
|
When changing existing source code, you do not alter the copyright of the
|
||||||
|
original file(s). The copyright is still owned by the original creator(s) or
|
||||||
|
those who have been assigned copyright by the original author(s).
|
||||||
|
|
||||||
|
By submitting a patch to the curl project, you are assumed to have the right
|
||||||
|
to the code and to be allowed by your employer or whatever to hand over that
|
||||||
|
patch/code to us. We credit you for your changes as far as possible, to give
|
||||||
|
credit but also to keep a trace back to who made what changes. Please always
|
||||||
|
provide us with your full real name when contributing.
|
||||||
|
|
||||||
|
## What To Read
|
||||||
|
|
||||||
|
Source code, the man pages, the [INTERNALS
|
||||||
|
document](https://curl.se/dev/internals.html),
|
||||||
|
[TODO](https://curl.se/docs/todo.html),
|
||||||
|
[KNOWN_BUGS](https://curl.se/docs/knownbugs.html) and the [most recent
|
||||||
|
changes](https://curl.se/dev/sourceactivity.html) in git. Just lurking on the
|
||||||
|
[curl-library mailing list](https://curl.se/mail/list.cgi?list=curl-library)
|
||||||
|
gives you a lot of insights on what's going on right now. Asking there is a
|
||||||
|
good idea too.
|
||||||
|
|
||||||
|
## Write a good patch
|
||||||
|
|
||||||
|
### Follow code style
|
||||||
|
|
||||||
|
When writing C code, follow the
|
||||||
|
[CODE_STYLE](https://curl.se/dev/code-style.html) already established in
|
||||||
|
the project. Consistent style makes code easier to read and mistakes less
|
||||||
|
likely to happen. Run `make checksrc` before you submit anything, to make sure
|
||||||
|
you follow the basic style. That script does not verify everything, but if it
|
||||||
|
complains you know you have work to do.
|
||||||
|
|
||||||
|
### Non-clobbering All Over
|
||||||
|
|
||||||
|
When you write new functionality or fix bugs, it is important that you do not
|
||||||
|
fiddle all over the source files and functions. Remember that it is likely
|
||||||
|
that other people have done changes in the same source files as you have and
|
||||||
|
possibly even in the same functions. If you bring completely new
|
||||||
|
functionality, try writing it in a new source file. If you fix bugs, try to
|
||||||
|
fix one bug at a time and send them as separate patches.
|
||||||
|
|
||||||
|
### Write Separate Changes
|
||||||
|
|
||||||
|
It is annoying when you get a huge patch from someone that is said to fix 511
|
||||||
|
odd problems, but discussions and opinions do not agree with 510 of them - or
|
||||||
|
509 of them were already fixed in a different way. Then the person merging
|
||||||
|
this change needs to extract the single interesting patch from somewhere
|
||||||
|
within the huge pile of source, and that creates a lot of extra work.
|
||||||
|
|
||||||
|
Preferably, each fix that corrects a problem should be in its own patch/commit
|
||||||
|
with its own description/commit message stating exactly what they correct so
|
||||||
|
that all changes can be selectively applied by the maintainer or other
|
||||||
|
interested parties.
|
||||||
|
|
||||||
|
Also, separate changes enable bisecting much better for tracking problems
|
||||||
|
and regressions in the future.
|
||||||
|
|
||||||
|
### Patch Against Recent Sources
|
||||||
|
|
||||||
|
Please try to get the latest available sources to make your patches against.
|
||||||
|
It makes the lives of the developers so much easier. The best is if you get
|
||||||
|
the most up-to-date sources from the git repository, but the latest release
|
||||||
|
archive is quite OK as well.
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
Writing docs is dead boring and one of the big problems with many open source
|
||||||
|
projects but someone's gotta do it. It makes things a lot easier if you submit
|
||||||
|
a small description of your fix or your new features with every contribution
|
||||||
|
so that it can be swiftly added to the package documentation.
|
||||||
|
|
||||||
|
Documentation is mostly provided as manpages or plain ASCII files. The
|
||||||
|
manpages are rendered from their source files that are usually written using
|
||||||
|
markdown. Most HTML files on the website and in the release archives are
|
||||||
|
generated from corresponding markdown and ASCII files.
|
||||||
|
|
||||||
|
### Test Cases
|
||||||
|
|
||||||
|
Since the introduction of the test suite, we can quickly verify that the main
|
||||||
|
features are working as they are supposed to. To maintain this situation and
|
||||||
|
improve it, all new features and functions that are added need to be tested in
|
||||||
|
the test suite. Every feature that is added should get at least one valid test
|
||||||
|
case that verifies that it works as documented. If every submitter also posts
|
||||||
|
a few test cases, it does not end up a heavy burden on a single person.
|
||||||
|
|
||||||
|
If you do not have test cases or perhaps you have done something that is hard
|
||||||
|
to write tests for, do explain exactly how you have otherwise tested and
|
||||||
|
verified your changes.
|
||||||
|
|
||||||
|
## Submit Your Changes
|
||||||
|
|
||||||
|
### How to get your changes into the main sources
|
||||||
|
|
||||||
|
Ideally you file a [pull request on
|
||||||
|
GitHub](https://github.com/curl/curl/pulls), but you can also send your plain
|
||||||
|
patch to [the curl-library mailing
|
||||||
|
list](https://curl.se/mail/list.cgi?list=curl-library).
|
||||||
|
|
||||||
|
If you opt to post a patch on the mailing list, chances are someone converts
|
||||||
|
it into a pull request for you, to have the CI jobs verify it properly before it
|
||||||
|
can be merged. Be prepared that some feedback on the proposed change might
|
||||||
|
then come on GitHub.
|
||||||
|
|
||||||
|
Your changes will be reviewed and discussed and you are expected to correct flaws
|
||||||
|
pointed out and update accordingly, or the change risks stalling and
|
||||||
|
eventually just getting deleted without action. As a submitter of a change,
|
||||||
|
you are the owner of that change until it has been merged.
|
||||||
|
|
||||||
|
Respond on the list or on GitHub about the change and answer questions and/or
|
||||||
|
fix nits/flaws. This is important. We take lack of replies as a sign that you
|
||||||
|
are not anxious to get your patch accepted and we tend to simply drop such
|
||||||
|
changes.
|
||||||
|
|
||||||
|
### About pull requests
|
||||||
|
|
||||||
|
With GitHub it is easy to send a [pull
|
||||||
|
request](https://github.com/curl/curl/pulls) to the curl project to have
|
||||||
|
changes merged.
|
||||||
|
|
||||||
|
We strongly prefer pull requests to mailed patches, as it makes it a proper
|
||||||
|
git commit that is easy to merge and they are easy to track and not that easy
|
||||||
|
to lose in the flood of many emails, like they sometimes do on the mailing
|
||||||
|
lists.
|
||||||
|
|
||||||
|
Every pull request submitted is automatically tested in several different
|
||||||
|
ways. [See the CI document for more
|
||||||
|
information](https://github.com/curl/curl/blob/master/tests/CI.md).
|
||||||
|
|
||||||
|
Sometimes the tests fail due to a dependency service temporarily being offline
|
||||||
|
or otherwise unavailable, e.g. package downloads. In this case you can just
|
||||||
|
try to update your pull requests to rerun the tests later as described below.
|
||||||
|
|
||||||
|
You can update your pull requests by pushing new commits or force-pushing
|
||||||
|
changes to existing commits. Force-pushing an amended commit without any
|
||||||
|
actual content changed also allows you to retrigger the tests for that commit.
|
||||||
|
|
||||||
|
When you adjust your pull requests after review, consider squashing the
|
||||||
|
commits so that we can review the full updated version more easily.
|
||||||
|
|
||||||
|
A pull request sent to the project might get labeled `needs-votes` by a
|
||||||
|
project maintainer. This label means that in addition to meeting all other
|
||||||
|
checks and qualifications this pull request must also receive more "votes" of
|
||||||
|
user support. More signs that people want this to happen. It could be in the
|
||||||
|
form of messages saying so, or thumbs-up reactions on GitHub.
|
||||||
|
|
||||||
|
### Making quality changes
|
||||||
|
|
||||||
|
Make the patch against as recent source versions as possible.
|
||||||
|
|
||||||
|
If you have followed the tips in this document and your patch still has not
|
||||||
|
been incorporated or responded to after some weeks, consider resubmitting it
|
||||||
|
to the list or better yet: change it to a pull request.
|
||||||
|
|
||||||
|
### Commit messages
|
||||||
|
|
||||||
|
A short guide to how to write git commit messages in the curl project.
|
||||||
|
|
||||||
|
---- start ----
|
||||||
|
[area]: [short line describing the main effect]
|
||||||
|
-- empty line --
|
||||||
|
[full description, no wider than 72 columns that describes as much as
|
||||||
|
possible as to why this change is made, and possibly what things
|
||||||
|
it fixes and everything else that is related, with unwieldy URLs replaced
|
||||||
|
with references like [0], [1], etc.]
|
||||||
|
-- empty line --
|
||||||
|
[[0] URL - Reference to a URL in the description, almost like Markdown;
|
||||||
|
the last numbered reference is followed by an -- empty line -- ]
|
||||||
|
[Follow-up to {shorthash} - if this fixes or continues a previous commit;
|
||||||
|
add a Ref: that commit's PR or issue if it's not a small, obvious fix;
|
||||||
|
followed by an -- empty line -- ]
|
||||||
|
[Bug: URL to the source of the report or more related discussion; use Fixes
|
||||||
|
for GitHub issues instead when that is appropriate]
|
||||||
|
[Approved-by: John Doe - credit someone who approved the PR; if you are
|
||||||
|
committing this for someone else using --author=... you do not need this
|
||||||
|
as you are implicitly approving it by committing]
|
||||||
|
[Authored-by: John Doe - credit the original author of the code; only use
|
||||||
|
this if you cannot use "git commit --author=..."]
|
||||||
|
[Signed-off-by: John Doe - we do not use this, but do not bother removing it]
|
||||||
|
[whatever-else-by: credit all helpers, finders, doers; try to use one of
|
||||||
|
the following keywords if at all possible, for consistency:
|
||||||
|
Acked-by:, Assisted-by:, Co-authored-by:, Found-by:, Reported-by:,
|
||||||
|
Reviewed-by:, Suggested-by:, Tested-by:]
|
||||||
|
[Ref: #1234 - if this is related to a GitHub issue or PR, possibly one that
|
||||||
|
has already been closed]
|
||||||
|
[Ref: URL to more information about the commit; use Bug: instead for
|
||||||
|
a reference to a bug on another bug tracker]
|
||||||
|
[Fixes #1234 - if this closes a GitHub issue; GitHub closes the issue once
|
||||||
|
this commit is merged]
|
||||||
|
[Closes #1234 - if this closes a GitHub PR; GitHub closes the PR once this
|
||||||
|
commit is merged]
|
||||||
|
---- stop ----
|
||||||
|
|
||||||
|
The first line is a succinct description of the change:
|
||||||
|
|
||||||
|
- use the imperative, present tense: "change" not "changed" nor "changes"
|
||||||
|
- do not capitalize the first letter
|
||||||
|
- no period (.) at the end
|
||||||
|
|
||||||
|
The `[area]` in the first line can be `http2`, `cookies`, `openssl` or
|
||||||
|
similar. There is no fixed list to select from but using the same "area" as
|
||||||
|
other related changes could make sense.
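
As an illustration, an entirely made-up commit message that follows the template; the area, wording, issue number and name are placeholders:

    vtls: fix session reuse check for changed cipher preferences

    Previously the session cache could hand back a session that was
    negotiated with different cipher preferences, leading to a failed
    handshake on reuse.

    Reported-by: Jane Doe
    Fixes #1234
    Closes #1235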
|
||||||
|
|
||||||
|
Do not forget to use commit --author=... if you commit someone else's work, and
|
||||||
|
make sure that you have your own user and email set up correctly in git before
|
||||||
|
you commit.
|
||||||
|
|
||||||
|
Add whichever header lines as appropriate, with one line per person if more
|
||||||
|
than one person was involved. There is no need to credit yourself unless you
|
||||||
|
are using --author=... which hides your identity. Do not include people's
|
||||||
|
email addresses in headers to avoid spam, unless they are already public from
|
||||||
|
a previous commit; saying `{userid} on github` is OK.
|
||||||
|
|
||||||
|
### Write Access to git Repository
|
||||||
|
|
||||||
|
If you are a frequent contributor, you may be given push access to the git
|
||||||
|
repository and then you are able to push your changes straight into the git
|
||||||
|
repo instead of sending changes as pull requests or by mail as patches.
|
||||||
|
|
||||||
|
Just ask if this is what you would want. You are required to have posted
|
||||||
|
several high quality patches first, before you can be granted push access.
|
||||||
|
|
||||||
|
### How To Make a Patch with git
|
||||||
|
|
||||||
|
You need to first checkout the repository:
|
||||||
|
|
||||||
|
git clone https://github.com/curl/curl.git
|
||||||
|
|
||||||
|
You then proceed and edit all the files you like and you commit them to your
|
||||||
|
local repository:
|
||||||
|
|
||||||
|
git commit [file]
|
||||||
|
|
||||||
|
As usual, group your commits so that you commit all changes at once that
|
||||||
|
constitute a logical change.
|
||||||
|
|
||||||
|
Once you have done all your commits and you are happy with what you see, you
|
||||||
|
can make patches out of your changes that are suitable for mailing:
|
||||||
|
|
||||||
|
git format-patch remotes/origin/master
|
||||||
|
|
||||||
|
This creates files in your local directory named `NNNN-[name].patch` for each
|
||||||
|
commit.
|
||||||
|
|
||||||
|
Now send those patches off to the curl-library list. You can of course opt to
|
||||||
|
do that with the 'git send-email' command.
|
||||||
|
|
||||||
|
### How To Make a Patch without git
|
||||||
|
|
||||||
|
Keep a copy of the unmodified curl sources. Make your changes in a separate
|
||||||
|
source tree. When you think you have something that you want to offer the
|
||||||
|
curl community, use GNU diff to generate patches.
|
||||||
|
|
||||||
|
If you have modified a single file, try something like:
|
||||||
|
|
||||||
|
diff -u unmodified-file.c my-changed-one.c > my-fixes.diff
|
||||||
|
|
||||||
|
If you have modified several files, possibly in different directories, you
|
||||||
|
can use diff recursively:
|
||||||
|
|
||||||
|
diff -ur curl-original-dir curl-modified-sources-dir > my-fixes.diff
|
||||||
|
|
||||||
|
The GNU diff and GNU patch tools exist for virtually all platforms, including
|
||||||
|
all kinds of Unixes and Windows.
|
||||||
|
|
||||||
|
### Useful resources
|
||||||
|
- [Webinar on getting code into cURL](https://www.youtube.com/watch?v=QmZ3W1d6LQI)
|
||||||
|
|
||||||
|
## Update copyright and license information
|
||||||
|
|
||||||
|
There is a CI job called **REUSE compliance / check** that runs on every pull
|
||||||
|
request and commit to verify that the *REUSE state* of all files is still
|
||||||
|
fine.
|
||||||
|
|
||||||
|
This means that all files need to have their license and copyright information
|
||||||
|
clearly stated. Ideally by having the standard curl source code header, with
|
||||||
|
the SPDX-License-Identifier included. If the header does not work, you can use a
|
||||||
|
smaller header or add the information for a specific file to the `.reuse/dep5`
|
||||||
|
file.
|
||||||
|
|
||||||
|
You can manually verify the copyright and compliance status by running the
|
||||||
|
`./scripts/copyright.pl` script in the root of the git repository.
|
174
src/dependencies/curl-8.8.0/docs/CURL-DISABLE.md
Normal file
174
src/dependencies/curl-8.8.0/docs/CURL-DISABLE.md
Normal file
|
@@ -0,0 +1,174 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Code defines to disable features and protocols
|
||||||
|
|
||||||
|
## `CURL_DISABLE_ALTSVC`
|
||||||
|
|
||||||
|
Disable support for Alt-Svc: HTTP headers.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_BINDLOCAL`
|
||||||
|
|
||||||
|
Disable support for binding the local end of connections.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_COOKIES`
|
||||||
|
|
||||||
|
Disable support for HTTP cookies.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_BASIC_AUTH`
|
||||||
|
|
||||||
|
Disable support for the Basic authentication methods.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_BEARER_AUTH`
|
||||||
|
|
||||||
|
Disable support for the Bearer authentication methods.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_DIGEST_AUTH`
|
||||||
|
|
||||||
|
Disable support for the Digest authentication methods.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_KERBEROS_AUTH`
|
||||||
|
|
||||||
|
Disable support for the Kerberos authentication methods.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_NEGOTIATE_AUTH`
|
||||||
|
|
||||||
|
Disable support for the negotiate authentication methods.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_AWS`
|
||||||
|
|
||||||
|
Disable **AWS-SIG4** support.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_DICT`
|
||||||
|
|
||||||
|
Disable the DICT protocol
|
||||||
|
|
||||||
|
## `CURL_DISABLE_DOH`
|
||||||
|
|
||||||
|
Disable DNS-over-HTTPS
|
||||||
|
|
||||||
|
## `CURL_DISABLE_FILE`
|
||||||
|
|
||||||
|
Disable the FILE protocol
|
||||||
|
|
||||||
|
## `CURL_DISABLE_FORM_API`
|
||||||
|
|
||||||
|
Disable the form API
|
||||||
|
|
||||||
|
## `CURL_DISABLE_FTP`
|
||||||
|
|
||||||
|
Disable the FTP (and FTPS) protocol
|
||||||
|
|
||||||
|
## `CURL_DISABLE_GETOPTIONS`
|
||||||
|
|
||||||
|
Disable the `curl_easy_options` API calls that lets users get information
|
||||||
|
about existing options to `curl_easy_setopt`.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_GOPHER`
|
||||||
|
|
||||||
|
Disable the GOPHER protocol.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_HEADERS_API`
|
||||||
|
|
||||||
|
Disable the HTTP header API.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_HSTS`
|
||||||
|
|
||||||
|
Disable the HTTP Strict Transport Security support.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_HTTP`
|
||||||
|
|
||||||
|
Disable the HTTP(S) protocols. Note that this then also disables HTTP proxy
|
||||||
|
support.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_HTTP_AUTH`
|
||||||
|
|
||||||
|
Disable support for all HTTP authentication methods.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_IMAP`
|
||||||
|
|
||||||
|
Disable the IMAP(S) protocols.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_LDAP`
|
||||||
|
|
||||||
|
Disable the LDAP(S) protocols.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_LDAPS`
|
||||||
|
|
||||||
|
Disable the LDAPS protocol.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_LIBCURL_OPTION`
|
||||||
|
|
||||||
|
Disable the --libcurl option from the curl tool.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_MIME`
|
||||||
|
|
||||||
|
Disable MIME support.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_MQTT`
|
||||||
|
|
||||||
|
Disable MQTT support.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_NETRC`
|
||||||
|
|
||||||
|
Disable the netrc parser.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_NTLM`
|
||||||
|
|
||||||
|
Disable support for NTLM.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_OPENSSL_AUTO_LOAD_CONFIG`
|
||||||
|
|
||||||
|
Disable the auto load config support in the OpenSSL backend.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_PARSEDATE`
|
||||||
|
|
||||||
|
Disable date parsing
|
||||||
|
|
||||||
|
## `CURL_DISABLE_POP3`
|
||||||
|
|
||||||
|
Disable the POP3 protocol
|
||||||
|
|
||||||
|
## `CURL_DISABLE_PROGRESS_METER`
|
||||||
|
|
||||||
|
Disable the built-in progress meter
|
||||||
|
|
||||||
|
## `CURL_DISABLE_PROXY`
|
||||||
|
|
||||||
|
Disable support for proxies
|
||||||
|
|
||||||
|
## `CURL_DISABLE_RTSP`
|
||||||
|
|
||||||
|
Disable the RTSP protocol.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_SHUFFLE_DNS`
|
||||||
|
|
||||||
|
Disable the shuffle DNS feature
|
||||||
|
|
||||||
|
## `CURL_DISABLE_SMB`
|
||||||
|
|
||||||
|
Disable the SMB(S) protocols
|
||||||
|
|
||||||
|
## `CURL_DISABLE_SMTP`
|
||||||
|
|
||||||
|
Disable the SMTP(S) protocols
|
||||||
|
|
||||||
|
## `CURL_DISABLE_SOCKETPAIR`
|
||||||
|
|
||||||
|
Disable the use of `socketpair()` internally to allow waking up and canceling
|
||||||
|
`curl_multi_poll()`.
|
||||||
|
|
||||||
|
## `CURL_DISABLE_TELNET`
|
||||||
|
|
||||||
|
Disable the TELNET protocol
|
||||||
|
|
||||||
|
## `CURL_DISABLE_TFTP`
|
||||||
|
|
||||||
|
Disable the TFTP protocol
|
||||||
|
|
||||||
|
## `CURL_DISABLE_VERBOSE_STRINGS`
|
||||||
|
|
||||||
|
Disable verbose strings and error messages.
|
159
src/dependencies/curl-8.8.0/docs/CURLDOWN.md
Normal file
159
src/dependencies/curl-8.8.0/docs/CURLDOWN.md
Normal file
|
@@ -0,0 +1,159 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curldown
|
||||||
|
|
||||||
|
A markdown-like syntax for libcurl man pages.
|
||||||
|
|
||||||
|
## Purpose
|
||||||
|
|
||||||
|
A text format for writing libcurl documentation in the shape of man pages.
|
||||||
|
|
||||||
|
Make it easier for users to contribute and write documentation. A format that
|
||||||
|
is easier on the eye in its source format.
|
||||||
|
|
||||||
|
Make it harder to do syntactical mistakes.
|
||||||
|
|
||||||
|
Use a format that allows creating man pages that end up looking exactly like
|
||||||
|
the man pages did when we wrote them in nroff format.
|
||||||
|
|
||||||
|
Take advantage of the fact that people these days are accustomed to markdown
|
||||||
|
by using a markdown-like syntax.
|
||||||
|
|
||||||
|
This allows us to fix issues in the nroff format more easily since we now
generate them. For example: escaping minus characters to prevent them from
being turned into Unicode by man.
|
||||||
|
|
||||||
|
Generate nroff output that looks (next to) *identical* to the previous files,
|
||||||
|
so that the look, existing test cases, HTML conversions, existing
|
||||||
|
infrastructure etc remain mostly intact.
|
||||||
|
|
||||||
|
Contains meta-data in a structured way to allow better output (for example the
|
||||||
|
see also information) and general awareness of what the file is about.
|
||||||
|
|
||||||
|
## File extension
|
||||||
|
|
||||||
|
Since curldown looks similar to markdown, we use `.md` extensions on the
|
||||||
|
files.
|
||||||
|
|
||||||
|
## Conversion
|
||||||
|
|
||||||
|
Convert **from curldown to nroff** with `cd2nroff`. Generates nroff man pages.
|
||||||
|
|
||||||
|
Convert **from nroff to curldown** with `nroff2cd`. This is only meant to be
|
||||||
|
used for the initial conversion to curldown and should ideally never be needed
|
||||||
|
again.
|
||||||
|
|
||||||
|
Convert, check or clean up an existing curldown to nicer, better, cleaner
|
||||||
|
curldown with **cd2cd**.
|
||||||
|
|
||||||
|
Mass-convert all curldown files to nroff in specified directories with
|
||||||
|
`cdall`:
|
||||||
|
|
||||||
|
cdall [dir1] [dir2] [dir3] ..
|
||||||
|
|
||||||
|
## Known issues
|
||||||
|
|
||||||
|
The `cd2nroff` tool does not yet handle *italics* or **bold** where the start
|
||||||
|
and the end markers are used on separate lines.
|
||||||
|
|
||||||
|
The `nroff2cd` tool generates code style quotes for all `.fi` sections since
|
||||||
|
the nroff format does not carry a distinction.
|
||||||
|
|
||||||
|
# Format
|
||||||
|
|
||||||
|
Each curldown starts with a header with meta-data:
|
||||||
|
|
||||||
|
---
|
||||||
|
c: Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
Title: CURLOPT_AWS_SIGV4
|
||||||
|
Section: 3
|
||||||
|
Source: libcurl
|
||||||
|
Protocol:
|
||||||
|
- HTTP
|
||||||
|
See-also:
|
||||||
|
- CURLOPT_HEADEROPT (3)
|
||||||
|
- CURLOPT_HTTPAUTH (3)
|
||||||
|
TLS-backend:
|
||||||
|
- [name]
|
||||||
|
---
|
||||||
|
|
||||||
|
All curldown files *must* have all the headers present and at least one
|
||||||
|
`See-also:` entry specified.
|
||||||
|
|
||||||
|
If the man page is for section 3 (library related), the `Protocol` list must
contain at least one protocol, which can be `*` if the option is virtually for
everything. If `*` is used, it must be the only listed protocol. Recognized
protocols are either URL schemes (in uppercase), `TLS` or `TCP`.
|
||||||
|
|
||||||
|
If the `Protocol` list contains `TLS`, then there must also be a `TLS-backend`
|
||||||
|
list, specifying `All` or a list of what TLS backends that work with this
|
||||||
|
option. The available TLS backends are:
|
||||||
|
|
||||||
|
- `BearSSL`
|
||||||
|
- `GnuTLS`
|
||||||
|
- `mbedTLS`
|
||||||
|
- `OpenSSL` (also covers BoringSSL, libressl, quictls, AWS-LC and AmiSSL)
|
||||||
|
- `rustls`
|
||||||
|
- `Schannel`
|
||||||
|
- `Secure Transport`
|
||||||
|
- `wolfSSL`
|
||||||
|
- `All`: all TLS backends
|
||||||
|
|
||||||
|
Following the header in the file is the manual page, written using
markdown-like syntax:
|
||||||
|
|
||||||
|
~~~
|
||||||
|
# NAME
|
||||||
|
a page - this is a page describing something
|
||||||
|
|
||||||
|
# SYNOPSIS
|
||||||
|
~~~c
|
||||||
|
#include <curl/curl.h>
|
||||||
|
|
||||||
|
CURLcode curl_easy_setopt(CURL *handle, CURLOPT_AWS_SIGV4, char *param);
|
||||||
|
~~~
|
||||||
|
~~~
|
||||||
|
|
||||||
|
Quoted source code should start with `~~~c` and end with `~~~` while regular
|
||||||
|
quotes can start with `~~~` or just be indented with 4 spaces.
|
||||||
|
|
||||||
|
Headers at top-level `#` get converted to `.SH`.
|
||||||
|
|
||||||
|
`nroff2cd` supports the `##` next level header which gets converted to `.IP`.
|
||||||
|
|
||||||
|
Write bold words or phrases within `**` like:
|
||||||
|
|
||||||
|
This is a **bold** word.
|
||||||
|
|
||||||
|
Write italics like:
|
||||||
|
|
||||||
|
This is *italics*.
|
||||||
|
|
||||||
|
Since man pages cannot render backticks with any special formatting, such
occurrences in the source are instead shown using italics in the generated
output:
|
||||||
|
|
||||||
|
This `word` appears in italics.
|
||||||
|
|
||||||
|
When generating the nroff output, the tooling removes superfluous newlines,
|
||||||
|
meaning they can be used freely in the source file to make the text more
|
||||||
|
readable.
|
||||||
|
|
||||||
|
To make sure curldown documents render correctly as markdown, all literal
|
||||||
|
occurrences of `<` or `>` need to be escaped by a leading backslash.
|
||||||
|
|
||||||
|
## symbols
|
||||||
|
|
||||||
|
All mentioned curl symbols that have their own man pages, like
`curl_easy_perform(3)`, are automatically rendered using italics in the output
without having to enclose them with asterisks. This helps ensure that they get
converted to links properly later in the HTML version on the website, as
converted with `roffit`. This makes the curldown text easier to read even when
mentioning many curl symbols.
|
||||||
|
|
||||||
|
This auto-linking works for patterns matching `(lib|)curl[^ ]*(3)`.
|
55
src/dependencies/curl-8.8.0/docs/DEPRECATE.md
Normal file
55
src/dependencies/curl-8.8.0/docs/DEPRECATE.md
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Items to be removed from future curl releases
|
||||||
|
|
||||||
|
If any of these deprecated features is a cause for concern for you, please
|
||||||
|
email the
|
||||||
|
[curl-library mailing list](https://lists.haxx.se/listinfo/curl-library)
|
||||||
|
as soon as possible and explain to us why this is a problem for you and
|
||||||
|
how your use case cannot be satisfied properly using a workaround.
|
||||||
|
|
||||||
|
## TLS libraries without 1.3 support
|
||||||
|
|
||||||
|
curl drops support for TLS libraries without TLS 1.3 capability after May
|
||||||
|
2025.
|
||||||
|
|
||||||
|
It requires that a curl build using the library should be able to negotiate
|
||||||
|
and use TLS 1.3, or else it is not good enough.
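
One way for an application to check whether its curl build can actually do
TLS 1.3 is to require that version for a transfer. This is a hedged sketch
using the long-standing libcurl options `CURLOPT_SSLVERSION` and
`CURL_SSLVERSION_TLSv1_3`; it only illustrates the capability being required
here and says nothing about how the project itself tests TLS backends:

```c
#include <stdio.h>
#include <curl/curl.h>

/* Sketch: require TLS 1.3 for one transfer. If the TLS backend in this curl
 * build cannot negotiate TLS 1.3, the transfer fails with a TLS error. */
int main(void)
{
  CURLcode res = CURLE_FAILED_INIT;
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    curl_easy_setopt(curl, CURLOPT_SSLVERSION, (long)CURL_SSLVERSION_TLSv1_3);
    res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return (int)res;
}
```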
|
||||||
|
|
||||||
|
As of May 2024, the libraries that need to get fixed to remain supported after
|
||||||
|
May 2025 are: BearSSL and Secure Transport.
|
||||||
|
|
||||||
|
## space-separated `NOPROXY` patterns
|
||||||
|
|
||||||
|
When specifying patterns/domain names for curl that should *not* go through a
|
||||||
|
proxy, the curl tool features the `--noproxy` command line option and the
|
||||||
|
library supports the `NO_PROXY` environment variable and the `CURLOPT_NOPROXY`
|
||||||
|
libcurl option.
|
||||||
|
|
||||||
|
They all set the same list of patterns. This list is documented to be a set of
**comma-separated** names, but it can also be provided with the names
separated by just a space. The ability to use spaces for this has never been
documented, but some users may still have come to rely on it.
|
||||||
|
|
||||||
|
Several other tools and utilities also parse the `NO_PROXY` environment
|
||||||
|
variable but do not consider a space to be a valid separator. Using spaces for
|
||||||
|
separator is probably less portable and might cause more friction than commas
|
||||||
|
do. Users should use commas for this for greater portability.
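
As a small illustration of the comma-separated form (a hedged sketch; the
hostnames are made up), the same kind of list can be set on a libcurl handle
like this. The `--noproxy` option and the `NO_PROXY` environment variable
take the same comma-separated string:

```c
#include <curl/curl.h>

/* Sketch: pass a comma-separated (not space-separated) no-proxy list.
 * "internal.example" and ".corp.example" are made-up names. */
static void set_noproxy(CURL *curl)
{
  curl_easy_setopt(curl, CURLOPT_NOPROXY, "internal.example,.corp.example");
}
```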
|
||||||
|
|
||||||
|
curl removes the support for space-separated names in July 2024.
|
||||||
|
|
||||||
|
## past removals
|
||||||
|
|
||||||
|
- Pipelining
|
||||||
|
- axTLS
|
||||||
|
- PolarSSL
|
||||||
|
- NPN
|
||||||
|
- Support for systems without 64 bit data types
|
||||||
|
- NSS
|
||||||
|
- gskit
|
||||||
|
- mingw v1
|
||||||
|
- NTLM_WB
|
277
src/dependencies/curl-8.8.0/docs/DISTROS.md
Normal file
277
src/dependencies/curl-8.8.0/docs/DISTROS.md
Normal file
|
@ -0,0 +1,277 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curl distros
|
||||||
|
|
||||||
|
<!-- markdown-link-check-disable -->
|
||||||
|
|
||||||
|
Lots of organizations distribute curl packages to end users. This is a
|
||||||
|
collection of pointers to where to learn more about curl on and with each
|
||||||
|
distro. Those marked *Rolling Release* typically run the latest version of curl
|
||||||
|
and are therefore less likely to have back-ported patches to older versions.
|
||||||
|
|
||||||
|
We discuss curl distro issues, patches and collaboration on the [curl-distros
|
||||||
|
mailing list](https://lists.haxx.se/listinfo/curl-distros).
|
||||||
|
|
||||||
|
## AlmaLinux
|
||||||
|
|
||||||
|
- curl package source and patches: curl package source and patches
|
||||||
|
- curl issues: https://bugs.almalinux.org/view_all_bug_page.php click Category and choose curl
|
||||||
|
- curl security: https://errata.almalinux.org/ search for curl
|
||||||
|
|
||||||
|
## Alpine Linux
|
||||||
|
|
||||||
|
- curl: https://pkgs.alpinelinux.org/package/edge/main/x86_64/curl
|
||||||
|
- curl issues: https://gitlab.alpinelinux.org/alpine/aports/-/issues
|
||||||
|
- curl security: https://security.alpinelinux.org/srcpkg/curl
|
||||||
|
- curl package source and patches: https://gitlab.alpinelinux.org/alpine/aports/-/tree/master/main/curl
|
||||||
|
|
||||||
|
## Alt Linux
|
||||||
|
|
||||||
|
- curl: http://www.sisyphus.ru/srpm/Sisyphus/curl
|
||||||
|
- curl patches: http://www.sisyphus.ru/ru/srpm/Sisyphus/curl/patches
|
||||||
|
- curl issues: http://www.sisyphus.ru/ru/srpm/Sisyphus/curl/bugs
|
||||||
|
|
||||||
|
## Arch Linux
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://archlinux.org/packages/core/x86_64/curl/
|
||||||
|
- curl issues: https://gitlab.archlinux.org/archlinux/packaging/packages/curl/-/issues
|
||||||
|
- curl security: https://security.archlinux.org/package/curl
|
||||||
|
- curl wiki: https://wiki.archlinux.org/title/CURL
|
||||||
|
|
||||||
|
## Buildroot
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl package source and patches: https://git.buildroot.net/buildroot/tree/package/libcurl
|
||||||
|
- curl issues: https://bugs.buildroot.org/buglist.cgi?quicksearch=curl
|
||||||
|
|
||||||
|
## Chimera
|
||||||
|
|
||||||
|
- curl package source and patches: https://github.com/chimera-linux/cports/tree/master/main/curl
|
||||||
|
|
||||||
|
## Clear Linux
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/clearlinux-pkgs/curl
|
||||||
|
- curl issues: https://github.com/clearlinux/distribution/issues
|
||||||
|
|
||||||
|
## Conan
|
||||||
|
|
||||||
|
- curl: https://github.com/conan-io/conan-center-index/tree/master/recipes/libcurl
|
||||||
|
- curl issues: https://github.com/conan-io/conan-center-index/issues
|
||||||
|
- curl patches: https://github.com/conan-io/conan-center-index/tree/master/recipes/libcurl (in `all/patches/*`, if any)
|
||||||
|
|
||||||
|
## conda-forge
|
||||||
|
|
||||||
|
- curl: https://github.com/conda-forge/curl-feedstock
|
||||||
|
- curl issues: https://github.com/conda-forge/curl-feedstock/issues
|
||||||
|
|
||||||
|
## CRUX
|
||||||
|
|
||||||
|
- curl: https://crux.nu/portdb/?a=search&q=curl
|
||||||
|
- curl issues: https://git.crux.nu/ports/core/issues/?type=all&state=open&q=curl
|
||||||
|
|
||||||
|
## curl-for-win
|
||||||
|
|
||||||
|
(this is the official curl binaries for Windows shipped by the curl project)
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://curl.se/windows/
|
||||||
|
- curl patches: https://github.com/curl/curl-for-win/blob/main/curl.patch (if any)
|
||||||
|
- build-specific issues: https://github.com/curl/curl-for-win/issues
|
||||||
|
|
||||||
|
Issues and patches for this are managed in the main curl project.
|
||||||
|
|
||||||
|
## Cygwin
|
||||||
|
|
||||||
|
- curl: https://cygwin.com/cgit/cygwin-packages/curl/tree/curl.cygport
|
||||||
|
- curl patches: https://cygwin.com/cgit/cygwin-packages/curl/tree
|
||||||
|
- curl issues: https://inbox.sourceware.org/cygwin/?q=s%3Acurl
|
||||||
|
|
||||||
|
## Cygwin (cross mingw64)
|
||||||
|
|
||||||
|
- mingw64-x86_64-curl: https://cygwin.com/cgit/cygwin-packages/mingw64-x86_64-curl/tree/mingw64-x86_64-curl.cygport
|
||||||
|
- mingw64-x86_64-curl patches: https://cygwin.com/cgit/cygwin-packages/mingw64-x86_64-curl/tree
|
||||||
|
- mingw64-x86_64-curl issues: https://inbox.sourceware.org/cygwin/?q=s%3Amingw64-x86_64-curl
|
||||||
|
|
||||||
|
## Debian
|
||||||
|
|
||||||
|
- curl: https://tracker.debian.org/pkg/curl
|
||||||
|
- curl issues: https://bugs.debian.org/cgi-bin/pkgreport.cgi?pkg=curl
|
||||||
|
- curl patches: https://udd.debian.org/patches.cgi?src=curl
|
||||||
|
- curl patches: https://salsa.debian.org/debian/curl (in debian/* branches, inside the folder debian/patches)
|
||||||
|
|
||||||
|
## Fedora
|
||||||
|
|
||||||
|
- curl: https://src.fedoraproject.org/rpms/curl
|
||||||
|
- curl issues: [bugzilla](https://bugzilla.redhat.com/buglist.cgi?bug_status=NEW&bug_status=ASSIGNED&classification=Fedora&product=Fedora&product=Fedora%20EPEL&component=curl)
|
||||||
|
- curl patches: [list of patches in package git](https://src.fedoraproject.org/rpms/curl/tree/rawhide)
|
||||||
|
|
||||||
|
## FreeBSD
|
||||||
|
|
||||||
|
- curl: https://cgit.freebsd.org/ports/tree/ftp/curl
|
||||||
|
- curl patches: https://cgit.freebsd.org/ports/tree/ftp/curl
|
||||||
|
- curl issues: https://bugs.freebsd.org/bugzilla/buglist.cgi?bug_status=__open__&order=Importance&product=Ports%20%26%20Packages&query_format=advanced&short_desc=curl&short_desc_type=allwordssubstr
|
||||||
|
|
||||||
|
## Gentoo Linux
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://packages.gentoo.org/packages/net-misc/curl
|
||||||
|
- curl issues: https://bugs.gentoo.org/buglist.cgi?quicksearch=net-misc/curl
|
||||||
|
- curl package sources and patches: https://gitweb.gentoo.org/repo/gentoo.git/tree/net-misc/curl/
|
||||||
|
|
||||||
|
## GNU Guix
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://git.savannah.gnu.org/gitweb/?p=guix.git;a=blob;f=gnu/packages/curl.scm;hb=HEAD
|
||||||
|
- curl issues: https://issues.guix.gnu.org/search?query=curl
|
||||||
|
|
||||||
|
## Homebrew
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://formulae.brew.sh/formula/curl
|
||||||
|
|
||||||
|
Homebrew's policy is that all patches and issues should be submitted upstream
|
||||||
|
unless it is very specific to Homebrew's way of packaging software.
|
||||||
|
|
||||||
|
## MacPorts
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/macports/macports-ports/tree/master/net/curl
|
||||||
|
- curl issues: https://trac.macports.org/query?0_port=curl&0_port_mode=%7E&0_status=%21closed
|
||||||
|
- curl patches: https://github.com/macports/macports-ports/tree/master/net/curl/files
|
||||||
|
|
||||||
|
## Mageia
|
||||||
|
|
||||||
|
- curl: https://svnweb.mageia.org/packages/cauldron/curl/current/SPECS/curl.spec?view=markup
|
||||||
|
- curl issues: https://bugs.mageia.org/buglist.cgi?bug_status=NEW&bug_status=UNCONFIRMED&bug_status=NEEDINFO&bug_status=UPSTREAM&bug_status=ASSIGNED&component=RPM%20Packages&f1=cf_rpmpkg&list_id=176576&o1=casesubstring&product=Mageia&query_format=advanced&v1=curl
|
||||||
|
- curl patches: https://svnweb.mageia.org/packages/cauldron/curl/current/SOURCES/
|
||||||
|
- curl patches in stable distro releases: https://svnweb.mageia.org/packages/updates/<STABLE_VERSION>/curl/current/SOURCES/
|
||||||
|
- curl security: https://advisories.mageia.org/src_curl.html
|
||||||
|
|
||||||
|
## MSYS2
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/msys2/MINGW-packages/tree/master/mingw-w64-curl
|
||||||
|
- curl issues: https://github.com/msys2/MINGW-packages/issues
|
||||||
|
- curl patches: https://github.com/msys2/MINGW-packages/tree/master/mingw-w64-curl (`*.patch`)
|
||||||
|
|
||||||
|
## Muldersoft
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/lordmulder/cURL-build-win32
|
||||||
|
- curl issues: https://github.com/lordmulder/cURL-build-win32/issues
|
||||||
|
- curl patches: https://github.com/lordmulder/cURL-build-win32/tree/master/patch
|
||||||
|
|
||||||
|
## NixOS
|
||||||
|
|
||||||
|
- curl: https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/networking/curl/default.nix
|
||||||
|
- curl issues: https://github.com/NixOS/nixpkgs
|
||||||
|
|
||||||
|
nixpkgs is the package repository used by the NixOS Linux distribution, but
|
||||||
|
can also be used on other distributions
|
||||||
|
|
||||||
|
## OmniOS
|
||||||
|
|
||||||
|
- curl: https://github.com/omniosorg/omnios-build/tree/master/build/curl
|
||||||
|
- curl issues: https://github.com/omniosorg/omnios-build/issues
|
||||||
|
- curl patches: https://github.com/omniosorg/omnios-build/tree/master/build/curl/patches
|
||||||
|
|
||||||
|
## OpenIndiana
|
||||||
|
|
||||||
|
- curl: https://github.com/OpenIndiana/oi-userland/tree/oi/hipster/components/web/curl
|
||||||
|
- curl issues: https://www.illumos.org/projects/openindiana/issues
|
||||||
|
- curl patches: https://github.com/OpenIndiana/oi-userland/tree/oi/hipster/components/web/curl/patches
|
||||||
|
|
||||||
|
## OpenSUSE
|
||||||
|
|
||||||
|
- curl source and patches: https://build.opensuse.org/package/show/openSUSE%3AFactory/curl
|
||||||
|
|
||||||
|
## Oracle Solaris
|
||||||
|
|
||||||
|
- curl: https://github.com/oracle/solaris-userland/tree/master/components/curl
|
||||||
|
- curl issues: https://support.oracle.com/ (requires support contract)
|
||||||
|
- curl patches: https://github.com/oracle/solaris-userland/tree/master/components/curl/patches
|
||||||
|
|
||||||
|
## OpenEmbedded / Yocto Project
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://layers.openembedded.org/layerindex/recipe/5765/
|
||||||
|
- curl issues: https://bugzilla.yoctoproject.org/
|
||||||
|
- curl patches: https://git.openembedded.org/openembedded-core/tree/meta/recipes-support/curl
|
||||||
|
|
||||||
|
## PLD Linux
|
||||||
|
|
||||||
|
- curl package source and patches: https://github.com/pld-linux/curl
|
||||||
|
- curl issues: https://bugs.launchpad.net/pld-linux?field.searchtext=curl&search=Search&field.status%3Alist=NEW&field.status%3Alist=INCOMPLETE_WITH_RESPONSE&field.status%3Alist=INCOMPLETE_WITHOUT_RESPONSE&field.status%3Alist=CONFIRMED&field.status%3Alist=TRIAGED&field.status%3Alist=INPROGRESS&field.status%3Alist=FIXCOMMITTED&field.assignee=&field.bug_reporter=&field.omit_dupes=on&field.has_patch=&field.has_no_package=
|
||||||
|
|
||||||
|
## pkgsrc
|
||||||
|
|
||||||
|
- curl: https://github.com/NetBSD/pkgsrc/tree/trunk/www/curl
|
||||||
|
- curl issues: https://github.com/NetBSD/pkgsrc/issues
|
||||||
|
- curl patches: https://github.com/NetBSD/pkgsrc/tree/trunk/www/curl/patches
|
||||||
|
|
||||||
|
## Red Hat Enterprise Linux / CentOS Stream
|
||||||
|
|
||||||
|
- curl: https://kojihub.stream.centos.org/koji/packageinfo?packageID=217
|
||||||
|
- curl issues: https://issues.redhat.com/secure/CreateIssueDetails!init.jspa?pid=12332745&issuetype=1&components=12377466&priority=10300
|
||||||
|
- curl patches: https://gitlab.com/redhat/centos-stream/rpms/curl
|
||||||
|
|
||||||
|
## Rocky Linux
|
||||||
|
|
||||||
|
- curl: https://git.rockylinux.org/staging/rpms/curl/-/blob/r9/SPECS/curl.spec
|
||||||
|
- curl issues: https://bugs.rockylinux.org
|
||||||
|
- curl patches: https://git.rockylinux.org/staging/rpms/curl/-/tree/r9/SOURCES
|
||||||
|
|
||||||
|
## SerenityOS
|
||||||
|
|
||||||
|
- curl: https://github.com/SerenityOS/serenity/tree/master/Ports/curl
|
||||||
|
- curl issues: https://github.com/SerenityOS/serenity/issues?q=label%3Aports
|
||||||
|
- curl patches: https://github.com/SerenityOS/serenity/tree/master/Ports/curl/patches
|
||||||
|
|
||||||
|
## SmartOS
|
||||||
|
|
||||||
|
- curl: https://github.com/TritonDataCenter/illumos-extra/tree/master/curl
|
||||||
|
- curl issues: https://github.com/TritonDataCenter/illumos-extra/issues
|
||||||
|
- curl patches: https://github.com/TritonDataCenter/illumos-extra/tree/master/curl/Patches
|
||||||
|
|
||||||
|
## SPACK
|
||||||
|
|
||||||
|
- curl package source and patches: https://github.com/spack/spack/tree/develop/var/spack/repos/builtin/packages/curl
|
||||||
|
|
||||||
|
## vcpkg
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/microsoft/vcpkg/tree/master/ports/curl
|
||||||
|
- curl issues: https://github.com/microsoft/vcpkg/issues
|
||||||
|
- curl patches: https://github.com/microsoft/vcpkg/tree/master/ports/curl (`*.patch`)
|
||||||
|
|
||||||
|
## Void Linux
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/void-linux/void-packages/tree/master/srcpkgs/curl
|
||||||
|
- curl issues: https://github.com/void-linux/void-packages/issues
|
||||||
|
- curl patches: https://github.com/void-linux/void-packages/tree/master/srcpkgs/curl/patches
|
||||||
|
|
||||||
|
## Wolfi
|
||||||
|
|
||||||
|
*Rolling Release*
|
||||||
|
|
||||||
|
- curl: https://github.com/wolfi-dev/os/blob/main/curl.yaml
|
134
src/dependencies/curl-8.8.0/docs/DYNBUF.md
Normal file
134
src/dependencies/curl-8.8.0/docs/DYNBUF.md
Normal file
|
@ -0,0 +1,134 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# dynbuf
|
||||||
|
|
||||||
|
This is the internal module for creating and handling "dynamic buffers". This
means buffers that can be appended to dynamically and that grow to adapt.
|
||||||
|
|
||||||
|
There is always a terminating zero put at the end of the dynamic buffer.
|
||||||
|
|
||||||
|
The `struct dynbuf` is used to hold data for each instance of a dynamic
|
||||||
|
buffer. The members of that struct **MUST NOT** be accessed or modified
|
||||||
|
without using the dedicated dynbuf API.
|
||||||
|
|
||||||
|
## `Curl_dyn_init`
|
||||||
|
|
||||||
|
```c
|
||||||
|
void Curl_dyn_init(struct dynbuf *s, size_t toobig);
|
||||||
|
```
|
||||||
|
|
||||||
|
This initializes a struct to use for dynbuf and it cannot fail. The `toobig`
|
||||||
|
value **must** be set to the maximum size we allow this buffer instance to
|
||||||
|
grow to. The functions below return `CURLE_OUT_OF_MEMORY` when hitting this
|
||||||
|
limit.
|
||||||
|
|
||||||
|
## `Curl_dyn_free`
|
||||||
|
|
||||||
|
```c
|
||||||
|
void Curl_dyn_free(struct dynbuf *s);
|
||||||
|
```
|
||||||
|
|
||||||
|
Free the associated memory and clean up. After a free, the `dynbuf` struct can
|
||||||
|
be reused to start appending new data to.
|
||||||
|
|
||||||
|
## `Curl_dyn_addn`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_dyn_addn(struct dynbuf *s, const void *mem, size_t len);
|
||||||
|
```
|
||||||
|
|
||||||
|
Append arbitrary data of a given length to the end of the buffer.
|
||||||
|
|
||||||
|
If this function fails it calls `Curl_dyn_free` on `dynbuf`.
|
||||||
|
|
||||||
|
## `Curl_dyn_add`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_dyn_add(struct dynbuf *s, const char *str);
|
||||||
|
```
|
||||||
|
|
||||||
|
Append a C string to the end of the buffer.
|
||||||
|
|
||||||
|
If this function fails it calls `Curl_dyn_free` on `dynbuf`.
|
||||||
|
|
||||||
|
## `Curl_dyn_addf`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_dyn_addf(struct dynbuf *s, const char *fmt, ...);
|
||||||
|
```
|
||||||
|
|
||||||
|
Append a `printf()`-style string to the end of the buffer.
|
||||||
|
|
||||||
|
If this function fails it calls `Curl_dyn_free` on `dynbuf`.
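
Putting the calls documented on this page together, here is a minimal usage
sketch. It assumes the internal `dynbuf.h` header declares this API and uses
an arbitrary 1024-byte limit; it is an illustration, not code from the curl
sources.

```c
#include <stdio.h>
#include <curl/curl.h>   /* for CURLcode in this standalone sketch */
#include "dynbuf.h"      /* assumed internal header declaring the dynbuf API */

/* Sketch: build a short string with the dynbuf API described above. */
static CURLcode build_greeting(void)
{
  struct dynbuf buf;
  CURLcode result;

  Curl_dyn_init(&buf, 1024);            /* never grow beyond 1024 bytes */

  result = Curl_dyn_add(&buf, "hello");
  if(!result)
    result = Curl_dyn_addf(&buf, " %s #%d", "world", 1);
  if(result)
    return result;                      /* the failing call already freed it */

  /* the pointer stays valid only until the next dynbuf call on this buffer */
  printf("%zu bytes: %s\n", Curl_dyn_len(&buf), Curl_dyn_ptr(&buf));

  Curl_dyn_free(&buf);
  return CURLE_OK;
}
```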
|
||||||
|
|
||||||
|
## `Curl_dyn_vaddf`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_dyn_vaddf(struct dynbuf *s, const char *fmt, va_list ap);
|
||||||
|
```
|
||||||
|
|
||||||
|
Append a `vprintf()`-style string to the end of the buffer.
|
||||||
|
|
||||||
|
If this function fails it calls `Curl_dyn_free` on `dynbuf`.
|
||||||
|
|
||||||
|
## `Curl_dyn_reset`
|
||||||
|
|
||||||
|
```c
|
||||||
|
void Curl_dyn_reset(struct dynbuf *s);
|
||||||
|
```
|
||||||
|
|
||||||
|
Reset the buffer length, but leave the allocation.
|
||||||
|
|
||||||
|
## `Curl_dyn_tail`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_dyn_tail(struct dynbuf *s, size_t length);
|
||||||
|
```
|
||||||
|
|
||||||
|
Keep `length` bytes of the buffer tail (the last `length` bytes of the
|
||||||
|
buffer). The rest of the buffer is dropped. The specified `length` must not be
|
||||||
|
larger than the buffer length. To instead keep the leading part, see
|
||||||
|
`Curl_dyn_setlen()`.
|
||||||
|
|
||||||
|
## `Curl_dyn_ptr`
|
||||||
|
|
||||||
|
```c
|
||||||
|
char *Curl_dyn_ptr(const struct dynbuf *s);
|
||||||
|
```
|
||||||
|
|
||||||
|
Returns a `char *` to the buffer if it has a length, otherwise may return
|
||||||
|
NULL. Since the buffer may be reallocated, this pointer should not be trusted
|
||||||
|
or used anymore after the next buffer manipulation call.
|
||||||
|
|
||||||
|
## `Curl_dyn_uptr`
|
||||||
|
|
||||||
|
```c
|
||||||
|
unsigned char *Curl_dyn_uptr(const struct dynbuf *s);
|
||||||
|
```
|
||||||
|
|
||||||
|
Returns an `unsigned char *` to the buffer if it has a length, otherwise may
|
||||||
|
return NULL. Since the buffer may be reallocated, this pointer should not be
|
||||||
|
trusted or used anymore after the next buffer manipulation call.
|
||||||
|
|
||||||
|
## `Curl_dyn_len`
|
||||||
|
|
||||||
|
```c
|
||||||
|
size_t Curl_dyn_len(const struct dynbuf *s);
|
||||||
|
```
|
||||||
|
|
||||||
|
Returns the length of the buffer in bytes. Does not include the terminating
|
||||||
|
zero byte.
|
||||||
|
|
||||||
|
## `Curl_dyn_setlen`
|
||||||
|
|
||||||
|
```c
|
||||||
|
CURLcode Curl_dyn_setlen(struct dynbuf *s, size_t len);
|
||||||
|
```
|
||||||
|
|
||||||
|
Sets the new shorter length of the buffer in number of bytes. Keeps the
|
||||||
|
leftmost set number of bytes, discards the rest. To instead keep the tail part
|
||||||
|
of the buffer, see `Curl_dyn_tail()`.
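
To make the difference between the two trimming calls concrete, here is a
small hedged sketch under the same assumptions as the earlier example (return
codes ignored for brevity): starting from the 11-byte string "hello world",
keeping 5 bytes with each call keeps opposite ends of the buffer.

```c
#include "dynbuf.h"  /* assumed internal header declaring the dynbuf API */

/* Sketch: contrast Curl_dyn_setlen() and Curl_dyn_tail(). */
static void trim_examples(void)
{
  struct dynbuf head, tail;

  Curl_dyn_init(&head, 64);
  Curl_dyn_init(&tail, 64);
  Curl_dyn_add(&head, "hello world");
  Curl_dyn_add(&tail, "hello world");

  Curl_dyn_setlen(&head, 5);   /* keeps the leading part:  "hello" */
  Curl_dyn_tail(&tail, 5);     /* keeps the trailing part: "world" */

  Curl_dyn_free(&head);
  Curl_dyn_free(&tail);
}
```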
|
73
src/dependencies/curl-8.8.0/docs/EARLY-RELEASE.md
Normal file
73
src/dependencies/curl-8.8.0/docs/EARLY-RELEASE.md
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# How to determine if an early patch release is warranted
|
||||||
|
|
||||||
|
In the curl project we do releases every 8 weeks. Unless we break the cycle
|
||||||
|
and do an early patch release.
|
||||||
|
|
||||||
|
We do frequent releases partly to always have the next release "not too far
|
||||||
|
away".
|
||||||
|
|
||||||
|
## Bugfix
|
||||||
|
|
||||||
|
During the release cycle, and especially in the beginning of a new cycle (the
|
||||||
|
so-called "cool down" period), there are times when a bug is reported and
|
||||||
|
after it has been subsequently fixed correctly, the question might be asked:
|
||||||
|
is this bug and associated fix important enough for an early patch release?
|
||||||
|
|
||||||
|
The question can only be properly asked when a fix has been created and landed
|
||||||
|
in the git master branch.
|
||||||
|
|
||||||
|
## Early release
|
||||||
|
|
||||||
|
An early patch release means that we ship a new, complete and full release
|
||||||
|
called `major.minor.patch` where the `patch` part is increased by one since
|
||||||
|
the previous release. A curl release is a curl release. There is no small or
|
||||||
|
big and we never release just a patch. There is only "release".
|
||||||
|
|
||||||
|
## Questions to ask
|
||||||
|
|
||||||
|
- Is there a security advisory rated high or critical?
|
||||||
|
- Is there a data corruption bug?
|
||||||
|
- Did the bug cause an API/ABI breakage?
|
||||||
|
- Does the problem annoy a significant share of the user population?
|
||||||
|
|
||||||
|
If the answer is yes to one or more of the above, an early release might be
|
||||||
|
warranted.
|
||||||
|
|
||||||
|
More questions to ask ourselves when doing the assessment, if the answers to
the questions above are all 'no':
|
||||||
|
|
||||||
|
- Does the bug cause curl to prematurely terminate?
|
||||||
|
- How common is the affected buggy option/feature/protocol/platform to get
|
||||||
|
used?
|
||||||
|
- How large is the estimated impacted user base?
|
||||||
|
- Does the bug block something crucial for applications or other adoption of
|
||||||
|
curl "out there" ?
|
||||||
|
- Does the bug cause problems for curl developers or others on "the curl
|
||||||
|
team" ?
|
||||||
|
- Is the bug limited to the curl tool only? That might have a smaller impact
|
||||||
|
than a bug also present in libcurl.
|
||||||
|
- Is there a (decent) workaround?
|
||||||
|
- Is it a regression? Is the bug introduced in this release?
|
||||||
|
- Can the bug be fixed "easily" by applying a patch?
|
||||||
|
- Does the bug break the build? Most users do not build curl themselves.
|
||||||
|
- How long is it until the already scheduled next release?
|
||||||
|
- Can affected users instead safely revert to a former release until the next
scheduled release?
|
||||||
|
- Is it a performance regression with no functionality side-effects? If so it
|
||||||
|
has to be substantial.
|
||||||
|
|
||||||
|
## If an early release is deemed necessary
|
||||||
|
|
||||||
|
Unless done for security or similarly important reasons, an early release
|
||||||
|
should not be done within a week of the previous release.
|
||||||
|
|
||||||
|
This, to enable us to collect and bundle more fixes into the same release to
|
||||||
|
make the release more worthwhile for everyone and to allow more time for fixes
|
||||||
|
to settle and things to get tested. Getting a release in shape and done in
|
||||||
|
style is work that should not be rushed.
|
480
src/dependencies/curl-8.8.0/docs/ECH.md
Normal file
480
src/dependencies/curl-8.8.0/docs/ECH.md
Normal file
|
@ -0,0 +1,480 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Building curl with HTTPS-RR and ECH support
|
||||||
|
|
||||||
|
We've added support for ECH in this curl build. That can use HTTPS RRs
|
||||||
|
published in the DNS, if curl is using DoH, or else can accept the relevant
|
||||||
|
ECHConfigList values from the command line. That works with OpenSSL,
|
||||||
|
WolfSSL or boringssl as the TLS provider, depending on how you build curl.
|
||||||
|
|
||||||
|
This feature is EXPERIMENTAL. DO NOT USE IN PRODUCTION.
|
||||||
|
|
||||||
|
This should however provide enough of a proof-of-concept to prompt an informed
|
||||||
|
discussion about a good path forward for ECH support in curl, when using
|
||||||
|
OpenSSL, or other TLS libraries, as those add ECH support.
|
||||||
|
|
||||||
|
## OpenSSL Build
|
||||||
|
|
||||||
|
To build our ECH-enabled OpenSSL fork:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://github.com/defo-project/openssl
|
||||||
|
cd openssl
|
||||||
|
./config --libdir=lib --prefix=$HOME/code/openssl-local-inst
|
||||||
|
...stuff...
|
||||||
|
make -j8
|
||||||
|
...stuff (maybe go for coffee)...
|
||||||
|
make install_sw
|
||||||
|
...a little bit of stuff...
|
||||||
|
```
|
||||||
|
|
||||||
|
To build curl ECH-enabled, making use of the above:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://github.com/curl/curl
|
||||||
|
cd curl
|
||||||
|
autoreconf -fi
|
||||||
|
LDFLAGS="-Wl,-rpath,$HOME/code/openssl-local-inst/lib/" ./configure --with-ssl=$HOME/code/openssl-local-inst --enable-ech --enable-httpsrr
|
||||||
|
...lots of output...
|
||||||
|
WARNING: ech ECH HTTPSRR enabled but marked EXPERIMENTAL...
|
||||||
|
make
|
||||||
|
...lots more output...
|
||||||
|
```
|
||||||
|
|
||||||
|
If you do not get that WARNING at the end of the ``configure`` command, then ECH
|
||||||
|
is not enabled, so go back some steps and re-do whatever needs re-doing:-) If you
|
||||||
|
want to debug curl then you should add ``--enable-debug`` to the ``configure``
|
||||||
|
command.
|
||||||
|
|
||||||
|
In a recent (2024-05-20) build on one machine, configure failed to find the
|
||||||
|
ECH-enabled SSL library, apparently due to the existence of
|
||||||
|
``$HOME/code/openssl-local-inst/lib/pkgconfig`` as a directory containing
|
||||||
|
various settings. Deleting that directory worked around the problem but may not
|
||||||
|
be the best solution.
|
||||||
|
|
||||||
|
## Using ECH and DoH
|
||||||
|
|
||||||
|
Curl supports using DoH for A/AAAA lookups so it was relatively easy to add
|
||||||
|
retrieval of HTTPS RRs in that situation. To use ECH and DoH together:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code/curl
|
||||||
|
LD_LIBRARY_PATH=$HOME/code/openssl ./src/curl --ech true --doh-url https://one.one.one.one/dns-query https://defo.ie/ech-check.php
|
||||||
|
...
|
||||||
|
SSL_ECH_STATUS: success <img src="greentick-small.png" alt="good" /> <br/>
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
The output snippet above is within the HTML for the webpage, when things work.
|
||||||
|
|
||||||
|
The above works for these test sites:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
https://defo.ie/ech-check.php
|
||||||
|
https://draft-13.esni.defo.ie:8413/stats
|
||||||
|
https://draft-13.esni.defo.ie:8414/stats
|
||||||
|
https://crypto.cloudflare.com/cdn-cgi/trace
|
||||||
|
https://tls-ech.dev
|
||||||
|
```
|
||||||
|
|
||||||
|
The list above has 4 different server technologies, implemented by 3 different
|
||||||
|
parties, and includes a case (the port 8414 server) where HelloRetryRequest
|
||||||
|
(HRR) is forced.
|
||||||
|
|
||||||
|
We currently support the following new curl command line arguments/options:
|
||||||
|
|
||||||
|
- ``--ech <config>`` - the ``config`` value can be one of:
|
||||||
|
- ``false`` says to not attempt ECH
|
||||||
|
- ``true`` says to attempt ECH, if possible
|
||||||
|
- ``grease`` if attempting ECH is not possible, then send a GREASE ECH extension
|
||||||
|
- ``hard`` hard-fail the connection if ECH cannot be attempted
|
||||||
|
- ``ecl:<b64value>`` a base64 encoded ECHConfigList, rather than one accessed from the DNS
|
||||||
|
- ``pn:<name>`` over-ride the ``public_name`` from an ECHConfigList
|
||||||
|
|
||||||
|
Note that in the above "attempt ECH" means the client emitting a TLS
|
||||||
|
ClientHello with a "real" ECH extension, but that does not mean that the
|
||||||
|
relevant server can succeed in decrypting, as things can fail for other
|
||||||
|
reasons.
|
||||||
|
|
||||||
|
## Supplying an ECHConfigList on the command line
|
||||||
|
|
||||||
|
To supply the ECHConfigList on the command line, you might need a bit of
|
||||||
|
cut-and-paste, e.g.:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
dig +short https defo.ie
|
||||||
|
1 . ipv4hint=213.108.108.101 ech=AED+DQA8PAAgACD8WhlS7VwEt5bf3lekhHvXrQBGDrZh03n/LsNtAodbUAAEAAEAAQANY292ZXIuZGVmby5pZQAA ipv6hint=2a00:c6c0:0:116:5::10
|
||||||
|
```
|
||||||
|
|
||||||
|
Then paste the base64 encoded ECHConfigList onto the curl command line:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
LD_LIBRARY_PATH=$HOME/code/openssl ./src/curl --ech ecl:AED+DQA8PAAgACD8WhlS7VwEt5bf3lekhHvXrQBGDrZh03n/LsNtAodbUAAEAAEAAQANY292ZXIuZGVmby5pZQAA https://defo.ie/ech-check.php
|
||||||
|
...
|
||||||
|
SSL_ECH_STATUS: success <img src="greentick-small.png" alt="good" /> <br/>
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
The output snippet above is within the HTML for the webpage.
|
||||||
|
|
||||||
|
If you paste in the wrong ECHConfigList (it changes hourly for ``defo.ie``) you
|
||||||
|
should get an error like this:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
LD_LIBRARY_PATH=$HOME/code/openssl ./src/curl -vvv --ech ecl:AED+DQA8yAAgACDRMQo+qYNsNRNj+vfuQfFIkrrUFmM4vogucxKj/4nzYgAEAAEAAQANY292ZXIuZGVmby5pZQAA https://defo.ie/ech-check.php
|
||||||
|
...
|
||||||
|
* OpenSSL/3.3.0: error:0A00054B:SSL routines::ech required
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
There is a reason to want this command line option - for use before publishing
|
||||||
|
an ECHConfigList in the DNS as per the Internet-draft [A well-known URI for
|
||||||
|
publishing ECHConfigList values](https://datatracker.ietf.org/doc/draft-ietf-tls-wkech/).
|
||||||
|
|
||||||
|
If you do use a wrong ECHConfigList value, then the server might return a
|
||||||
|
good value, via the ``retry_configs`` mechanism. You can see that value in
|
||||||
|
the verbose output, e.g.:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
LD_LIBRARY_PATH=$HOME/code/openssl ./src/curl -vvv --ech ecl:AED+DQA8yAAgACDRMQo+qYNsNRNj+vfuQfFIkrrUFmM4vogucxKj/4nzYgAEAAEAAQANY292ZXIuZGVmby5pZQAA https://defo.ie/ech-check.php
|
||||||
|
...
|
||||||
|
* ECH: retry_configs AQD+DQA8DAAgACBvYqJy+Hgk33wh/ZLBzKSPgwxeop7gvojQzfASq7zeZQAEAAEAAQANY292ZXIuZGVmby5pZQAA/g0APEMAIAAgXkT5r4cYs8z19q5rdittyIX8gfQ3ENW4wj1fVoiJZBoABAABAAEADWNvdmVyLmRlZm8uaWUAAP4NADw2ACAAINXSE9EdXzEQIJZA7vpwCIQsWqsFohZARXChgPsnfI1kAAQAAQABAA1jb3Zlci5kZWZvLmllAAD+DQA8cQAgACASeiD5F+UoSnVoHvA2l1EifUVMFtbVZ76xwDqmMPraHQAEAAEAAQANY292ZXIuZGVmby5pZQAA
|
||||||
|
* ECH: retry_configs for defo.ie from cover.defo.ie, 319
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
At that point, you could copy the base64 encoded value above and try again.
|
||||||
|
For now, this only works for the OpenSSL and boringssl builds.
|
||||||
|
|
||||||
|
## Default settings
|
||||||
|
|
||||||
|
Curl has various ways to configure default settings, e.g. in ``$HOME/.curlrc``,
|
||||||
|
so one can set the DoH URL and enable ECH that way:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat ~/.curlrc
|
||||||
|
doh-url=https://one.one.one.one/dns-query
|
||||||
|
silent
|
||||||
|
ech=true
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that when you use the system's curl command (rather than our ECH-enabled
|
||||||
|
build), it is liable to warn that ``ech`` is an unknown option. If that is an
|
||||||
|
issue (e.g. if some script re-directs stdout and stderr somewhere) then adding
|
||||||
|
the ``silent`` line above seems to be a good enough fix. (Though of
|
||||||
|
course, yet another script could depend on non-silent behavior, so you may have
|
||||||
|
to figure out what you prefer yourself.) That seems to have changed with the
latest build: previously ``silent=TRUE`` was what I used in ``~/.curlrc``, but
now that seems to cause a problem, so that the following line(s) are ignored.
|
||||||
|
|
||||||
|
If you want to always use our OpenSSL build you can set ``LD_LIBRARY_PATH``
|
||||||
|
in the environment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export LD_LIBRARY_PATH=$HOME/code/openssl
|
||||||
|
```
|
||||||
|
|
||||||
|
When you do the above, there can be a mismatch between OpenSSL versions
|
||||||
|
for applications that check that. A ``git push`` for example fails so you
|
||||||
|
should unset ``LD_LIBRARY_PATH`` before doing that or use a different shell.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git push
|
||||||
|
OpenSSL version mismatch. Built against 30000080, you have 30200000
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
With all that setup as above the command line gets simpler:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./src/curl https://defo.ie/ech-check.php
|
||||||
|
...
|
||||||
|
SSL_ECH_STATUS: success <img src="greentick-small.png" alt="good" /> <br/>
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
The ``--ech true`` option is opportunistic, so tries to do ECH but does not fail if
|
||||||
|
the client for example cannot find any ECHConfig values. The ``--ech hard``
|
||||||
|
option hard-fails if there is no ECHConfig found in DNS, so for now, that is not
|
||||||
|
a good option to set as a default. Once ECH has really been attempted by
|
||||||
|
the client, if decryption on the server side fails, then curl fails.
|
||||||
|
|
||||||
|
## Code changes for ECH support when using DoH
|
||||||
|
|
||||||
|
Code changes are ``#ifdef`` protected via ``USE_ECH`` or ``USE_HTTPSRR``:
|
||||||
|
|
||||||
|
- ``USE_HTTPSRR`` is used for HTTPS RR retrieval code that could be generically
|
||||||
|
used should non-ECH uses for HTTPS RRs be identified, e.g. use of ALPN values
|
||||||
|
or IP address hints.
|
||||||
|
|
||||||
|
- ``USE_ECH`` protects ECH specific code.
|
||||||
|
|
||||||
|
There are various obvious code blocks for handling the new command line
|
||||||
|
arguments which aren't described here, but should be fairly clear.
|
||||||
|
|
||||||
|
As shown in the ``configure`` usage above, there are ``configure.ac`` changes
|
||||||
|
that allow separately dis/enabling ``USE_HTTPSRR`` and ``USE_ECH``. If ``USE_ECH``
|
||||||
|
is enabled, then ``USE_HTTPSRR`` is forced. In both cases ``USE_DOH``
|
||||||
|
is required. (There may be some configuration conflicts available for the
|
||||||
|
determined:-)
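
As a rough, hedged sketch of the dependency just described (illustrative only,
not actual curl source), the relationship between the three symbols can be
written as a compile-time check:

```c
/* The symbol names come from the text above; the check itself is only an
 * illustration of the stated dependency. */
#if defined(USE_ECH) && !defined(USE_HTTPSRR)
#error "USE_ECH requires USE_HTTPSRR"
#endif
#if (defined(USE_ECH) || defined(USE_HTTPSRR)) && !defined(USE_DOH)
#error "USE_ECH and USE_HTTPSRR require USE_DOH"
#endif
```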
|
||||||
|
|
||||||
|
The main functional change, as you would expect, is in ``lib/vtls/openssl.c``
|
||||||
|
where an ECHConfig, if available from command line or DNS cache, is fed into
|
||||||
|
the OpenSSL library via the new APIs implemented in our OpenSSL fork for that
|
||||||
|
purpose. This code also implements the opportunistic (``--ech true``) or hard-fail
|
||||||
|
(``--ech hard``) logic.
|
||||||
|
|
||||||
|
Other than that, the main additions are in ``lib/doh.c``
|
||||||
|
where we re-use ``dohprobe()`` to retrieve an HTTPS RR value for the target
|
||||||
|
domain. If such a value is found, that is stored using a new ``store_https()``
|
||||||
|
function in a new field in the ``dohentry`` structure.
|
||||||
|
|
||||||
|
The qname for the DoH query is modified if the port number is not 443, as
|
||||||
|
defined in the SVCB specification.
|
||||||
|
|
||||||
|
When the DoH process has worked, ``Curl_doh_is_resolved()`` now also returns
|
||||||
|
the relevant HTTPS RR value data in the ``Curl_dns_entry`` structure.
|
||||||
|
That is later accessed when the TLS session is being established, if ECH is
|
||||||
|
enabled (from ``lib/vtls/openssl.c`` as described above).
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
Things that need fixing, but that can probably be ignored for the
|
||||||
|
moment:
|
||||||
|
|
||||||
|
- We could easily add code to make use of an ``alpn=`` value found in an HTTPS
|
||||||
|
RR, passing that on to OpenSSL for use as the "inner" ALPN value, but have
|
||||||
|
yet to do that.
|
||||||
|
|
||||||
|
Current limitations (more interesting than the above):
|
||||||
|
|
||||||
|
- Only the first HTTPS RR value retrieved is actually processed as described
|
||||||
|
above, that could be extended in future, though picking the "right" HTTPS RR
|
||||||
|
could be non-trivial if multiple RRs are published - matching IP address hints
|
||||||
|
versus A/AAAA values might be a good basis for that. Last I checked though,
|
||||||
|
browsers supporting ECH did not handle multiple HTTPS RRs well, though that
|
||||||
|
needs re-checking as it has been a while.
|
||||||
|
|
||||||
|
- It is unclear how one should handle any IP address hints found in an HTTPS RR.
|
||||||
|
It may be that a bit of consideration of how "multi-CDN" deployments might
|
||||||
|
emerge would provide good answers there, but for now, it is not clear how best
|
||||||
|
curl might handle those values when present in the DNS.
|
||||||
|
|
||||||
|
- The SVCB/HTTPS RR specification supports a new "CNAME at apex" indirection
|
||||||
|
("aliasMode") - the current code takes no account of that at all. One could
|
||||||
|
envisage implementing the equivalent of following CNAMEs in such cases, but
|
||||||
|
it is not clear if that'd be a good plan. (As of now, chrome browsers do not seem
|
||||||
|
to have any support for that "aliasMode" and we've not checked Firefox for that
|
||||||
|
recently.)
|
||||||
|
|
||||||
|
- We have not investigated what related changes or additions might be needed
|
||||||
|
for applications using libcurl, as opposed to use of curl as a command line
|
||||||
|
tool.
|
||||||
|
|
||||||
|
- We have not yet implemented tests as part of the usual curl test harness as
|
||||||
|
doing so would seem to require re-implementing an ECH-enabled server as part
|
||||||
|
of the curl test harness. For now, we have a ``./tests/ech_test.sh`` script
|
||||||
|
that attempts ECH with various test servers and with many combinations of the
|
||||||
|
allowed command line options. While that is a useful test and has found issues,
|
||||||
|
it is not comprehensive and we're not (as yet) sure what would be the right
|
||||||
|
level of coverage. When running that script you should not have a
|
||||||
|
``$HOME/.curlrc`` file that affects ECH or some of the negative tests could
|
||||||
|
produce spurious failures.
|
||||||
|
|
||||||
|
## Building with cmake
|
||||||
|
|
||||||
|
To build with cmake, assuming our ECH-enabled OpenSSL is as before:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://github.com/curl/curl
|
||||||
|
cd curl
|
||||||
|
mkdir build
|
||||||
|
cd build
|
||||||
|
cmake -DOPENSSL_ROOT_DIR=$HOME/code/openssl -DUSE_ECH=1 -DUSE_HTTPSRR=1 ..
|
||||||
|
...
|
||||||
|
make
|
||||||
|
...
|
||||||
|
[100%] Built target curl
|
||||||
|
```
|
||||||
|
|
||||||
|
The binary produced by the cmake build does not need any ECH-specific
|
||||||
|
``LD_LIBRARY_PATH`` setting.
|
||||||
|
|
||||||
|
## boringssl build
|
||||||
|
|
||||||
|
BoringSSL is also supported by curl and also supports ECH, so to build
|
||||||
|
with that, instead of our ECH-enabled OpenSSL:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://boringssl.googlesource.com/boringssl
|
||||||
|
cd boringssl
|
||||||
|
cmake -DCMAKE_INSTALL_PREFIX:PATH=$HOME/code/boringssl/inst -DBUILD_SHARED_LIBS=1
|
||||||
|
make
|
||||||
|
...
|
||||||
|
make install
|
||||||
|
```
|
||||||
|
|
||||||
|
Then:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://github.com/curl/curl
|
||||||
|
cd curl
|
||||||
|
autoreconf -fi
|
||||||
|
LDFLAGS="-Wl,-rpath,$HOME/code/boringssl/inst/lib" ./configure --with-ssl=$HOME/code/boringssl/inst --enable-ech --enable-httpsrr
|
||||||
|
...lots of output...
|
||||||
|
WARNING: ech ECH HTTPSRR enabled but marked EXPERIMENTAL. Use with caution!
|
||||||
|
make
|
||||||
|
```
|
||||||
|
|
||||||
|
The boringssl APIs are fairly similar to those in our ECH-enabled OpenSSL
|
||||||
|
fork, so code changes are also in ``lib/vtls/openssl.c``, protected
|
||||||
|
via ``#ifdef OPENSSL_IS_BORINGSSL`` and are mostly obvious API variations.
|
||||||
|
|
||||||
|
The boringssl APIs however do not support the ``--ech pn:`` command line
|
||||||
|
variant as of now.
|
||||||
|
|
||||||
|
## WolfSSL build
|
||||||
|
|
||||||
|
WolfSSL also supports ECH and can be used by curl, so here's how:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://github.com/wolfSSL/wolfssl
|
||||||
|
cd wolfssl
|
||||||
|
./autogen.sh
|
||||||
|
./configure --prefix=$HOME/code/wolfssl/inst --enable-ech --enable-debug --enable-opensslextra
|
||||||
|
make
|
||||||
|
make install
|
||||||
|
```
|
||||||
|
|
||||||
|
The install prefix (``inst``) in the above causes WolfSSL to be installed there
|
||||||
|
and we seem to need that for the curl configure command to work out. The
|
||||||
|
``--enable-opensslextra`` turns out (after much faffing about;-) to be
|
||||||
|
important or else we get build problems with curl below.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code
|
||||||
|
git clone https://github.com/curl/curl
|
||||||
|
cd curl
|
||||||
|
autoreconf -fi
|
||||||
|
./configure --with-wolfssl=$HOME/code/wolfssl/inst --enable-ech --enable-httpsrr
|
||||||
|
make
|
||||||
|
```
|
||||||
|
|
||||||
|
There are some known issues with the ECH implementation in WolfSSL:
|
||||||
|
|
||||||
|
- The main issue is that the client currently handles HelloRetryRequest
incorrectly (see the [HRR issue](https://github.com/wolfSSL/wolfssl/issues/6802)).
|
||||||
|
The HRR issue means that the client does not work for
|
||||||
|
[this ECH test web site](https://tls-ech.dev) and any other similarly configured
|
||||||
|
sites.
|
||||||
|
- There is also an issue related to so-called middlebox compatibility mode.
|
||||||
|
[middlebox compatibility issue](https://github.com/wolfSSL/wolfssl/issues/6774)
|
||||||
|
|
||||||
|
### Code changes to support WolfSSL
|
||||||
|
|
||||||
|
There are what seem like oddball differences:
|
||||||
|
|
||||||
|
- The DoH URL in ``$HOME/.curlrc`` can use "1.1.1.1" for OpenSSL but has to be
|
||||||
|
"one.one.one.one" for WolfSSL. The latter works for both, so OK, we'll change
|
||||||
|
to that.
|
||||||
|
- There seems to be some difference in CA databases too - the WolfSSL version
|
||||||
|
does not like ``defo.ie``, whereas the system and OpenSSL ones do. We can ignore
|
||||||
|
that for our purposes via ``--insecure``/``-k`` but would need to fix for a
|
||||||
|
real setup. (Browsers do like those certificates though.)
|
||||||
|
|
||||||
|
Then there are some functional code changes:
|
||||||
|
|
||||||
|
- tweak to ``configure.ac`` to check if WolfSSL has ECH or not
|
||||||
|
- added code to ``lib/vtls/wolfssl.c`` mirroring what's done in the
|
||||||
|
OpenSSL equivalent above.
|
||||||
|
- WolfSSL does not support ``--ech false`` or the ``--ech pn:`` command line
|
||||||
|
argument.
|
||||||
|
|
||||||
|
The lack of support for ``--ech false`` is because wolfSSL has decided to
|
||||||
|
always at least GREASE if built to support ECH. In other words, GREASE is
|
||||||
|
a compile time choice for wolfSSL, but a runtime choice for OpenSSL or
|
||||||
|
boringssl. (Both are reasonable.)
|
||||||
|
|
||||||
|
## Additional notes
|
||||||
|
|
||||||
|
### Supporting ECH without DoH
|
||||||
|
|
||||||
|
All of the above only applies if DoH is being used. There should be a use-case
|
||||||
|
for ECH when DoH is not used by curl - if a system stub resolver supports DoT
|
||||||
|
or DoH, then, considering only ECH and the network threat model, it would make
|
||||||
|
sense for curl to support ECH without curl itself using DoH. The author for
|
||||||
|
example uses a combination of stubby+unbound as the system resolver listening
|
||||||
|
on localhost:53, so would fit this use-case. That said, it is unclear if
|
||||||
|
this is a niche that is worth trying to address. (The author is just as happy to
|
||||||
|
let curl use DoH to talk to the same public recursive that stubby might use:-)
|
||||||
|
|
||||||
|
Assuming for the moment this is a use-case we'd like to support, then
|
||||||
|
if DoH is not being used by curl, it is not clear at this time how to provide
|
||||||
|
support for ECH. One option would seem to be to extend the ``c-ares`` library
|
||||||
|
to support HTTPS RRs, but in that case it is not now clear whether such changes
|
||||||
|
would be attractive to the ``c-ares`` maintainers, nor whether the "tag=value"
|
||||||
|
extensibility inherent in the HTTPS/SVCB specification is a good match for the
|
||||||
|
``c-ares`` approach of defining structures specific to decoded answers for each
|
||||||
|
supported RRtype. We're also not sure how many downstream curl deployments
|
||||||
|
actually make use of the ``c-ares`` library, which would affect the utility of
|
||||||
|
such changes. Another option might be to consider using some other generic DNS
|
||||||
|
library that does support HTTPS RRs, but it is unclear if such a library could
|
||||||
|
or would be used by all or almost all curl builds and downstream releases of
|
||||||
|
curl.
|
||||||
|
|
||||||
|
Our current conclusion is that doing the above is likely best left until we
|
||||||
|
have some experience with the "using DoH" approach, so we're going to punt on
|
||||||
|
this for now.
|
||||||
|
|
||||||
|
### Debugging
|
||||||
|
|
||||||
|
Just a note to self as remembering this is a nuisance:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
LD_LIBRARY_PATH=$HOME/code/openssl:./lib/.libs gdb ./src/.libs/curl
|
||||||
|
```
|
||||||
|
|
||||||
|
### Localhost testing
|
||||||
|
|
||||||
|
It can be useful to be able to run against a localhost OpenSSL ``s_server``
|
||||||
|
for testing. We have published instructions for such
|
||||||
|
[localhost tests](https://github.com/defo-project/ech-dev-utils/blob/main/howtos/localhost-tests.md)
|
||||||
|
in another repository. Once you have that set up, you can start a server
|
||||||
|
and then run curl against that:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code/ech-dev-utils
|
||||||
|
./scripts/echsvr.sh -d
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
The ``echsvr.sh`` script supports many ECH-related options. Use ``echsvr.sh -h``
|
||||||
|
for details.
|
||||||
|
|
||||||
|
In another window:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd $HOME/code/curl/
|
||||||
|
./src/curl -vvv --insecure --connect-to foo.example.com:8443:localhost:8443 --ech ecl:AD7+DQA6uwAgACBix2B78sX+EQhEbxMspDOc8Z3xVS5aQpYP0Cxpc2AWPAAEAAEAAQALZXhhbXBsZS5jb20AAA==
|
||||||
|
```
|
||||||
|
|
||||||
|
### Automated use of ``retry_configs`` not supported so far...
|
||||||
|
|
||||||
|
As of now we have not added support for using ``retry_config`` handling in the
|
||||||
|
application - for a command line tool, one can just use ``dig`` (or ``kdig``)
|
||||||
|
to get the HTTPS RR and pass the ECHConfigList from that on the command line,
|
||||||
|
if needed, or one can access the value from command line output in verbose mode
|
||||||
|
and then re-use that in another invocation.
|
||||||
|
|
||||||
|
Both our OpenSSL fork and boringssl have APIs for both controlling GREASE and
|
||||||
|
accessing and logging ``retry_configs``, it seems WolfSSL has neither.
|
80
src/dependencies/curl-8.8.0/docs/EXPERIMENTAL.md
Normal file
80
src/dependencies/curl-8.8.0/docs/EXPERIMENTAL.md
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Experimental
|
||||||
|
|
||||||
|
Some features and functionality in curl and libcurl are considered
|
||||||
|
**EXPERIMENTAL**.
|
||||||
|
|
||||||
|
Experimental support in curl means:
|
||||||
|
|
||||||
|
1. Experimental features are provided to allow users to try them out and
|
||||||
|
provide feedback on functionality and API etc before they ship and get
|
||||||
|
"carved in stone".
|
||||||
|
2. You must enable the feature when invoking configure as otherwise curl is
|
||||||
|
not built with the feature present.
|
||||||
|
3. We strongly advise against using this feature in production.
|
||||||
|
4. **We reserve the right to change behavior** of the feature without sticking
|
||||||
|
to our API/ABI rules as we do for regular features, as long as it is marked
|
||||||
|
experimental.
|
||||||
|
5. Experimental features are clearly marked so in documentation. Beware.
|
||||||
|
|
||||||
|
## Graduation
|
||||||
|
|
||||||
|
1. Each experimental feature should have a set of documented requirements of
|
||||||
|
what is needed for that feature to graduate. Graduation means being removed
|
||||||
|
from the list of experiments.
|
||||||
|
2. An experiment should NOT graduate if it needs test cases to be disabled,
|
||||||
|
unless they are for minor features that are clearly documented as not
|
||||||
|
provided by the experiment and then the disabling should be managed inside
|
||||||
|
each affected test case.
|
||||||
|
|
||||||
|
## Experimental features right now
|
||||||
|
|
||||||
|
### The Hyper HTTP backend
|
||||||
|
|
||||||
|
Graduation requirements:
|
||||||
|
|
||||||
|
- HTTP/1 and HTTP/2 support, including multiplexing
|
||||||
|
|
||||||
|
### HTTP/3 support (non-ngtcp2 backends)
|
||||||
|
|
||||||
|
Graduation requirements:
|
||||||
|
|
||||||
|
- The used libraries should be considered out-of-beta with a reasonable
|
||||||
|
expectation of a stable API going forward.
|
||||||
|
|
||||||
|
- Using HTTP/3 with the given build should perform without risking busy-loops
|
||||||
|
|
||||||
|
### The rustls backend
|
||||||
|
|
||||||
|
Graduation requirements:
|
||||||
|
|
||||||
|
- a reasonable expectation of a stable API going forward.
|
||||||
|
|
||||||
|
### WebSocket
|
||||||
|
|
||||||
|
Graduation requirements:
|
||||||
|
|
||||||
|
- feedback from users saying that the API works for their specific use cases
|
||||||
|
|
||||||
|
- unless the above happens, we consider WebSocket silently working by
|
||||||
|
September 2024 when it has been stewing as EXPERIMENTAL for two years.
|
||||||
|
|
||||||
|
### ECH
|
||||||
|
|
||||||
|
Use of the HTTPS resource record and Encrypted Client Hello (ECH) when using
|
||||||
|
DoH.
|
||||||
|
|
||||||
|
Graduation requirements:
|
||||||
|
|
||||||
|
- ECH support exists in at least one widely used TLS library apart from
|
||||||
|
BoringSSL and wolfSSL.
|
||||||
|
|
||||||
|
- feedback from users saying that ECH works for their use cases
|
||||||
|
|
||||||
|
- it has been given time to mature, so no earlier than April 2025 (twelve
|
||||||
|
months after being added here)
|
1561
src/dependencies/curl-8.8.0/docs/FAQ
Normal file
1561
src/dependencies/curl-8.8.0/docs/FAQ
Normal file
File diff suppressed because it is too large
Load diff
225
src/dependencies/curl-8.8.0/docs/FEATURES.md
Normal file
225
src/dependencies/curl-8.8.0/docs/FEATURES.md
Normal file
|
@ -0,0 +1,225 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Features -- what curl can do
|
||||||
|
|
||||||
|
## curl tool
|
||||||
|
|
||||||
|
- config file support
|
||||||
|
- multiple URLs in a single command line
|
||||||
|
- range "globbing" support: [0-13], {one,two,three}
|
||||||
|
- multiple file upload on a single command line
|
||||||
|
- custom maximum transfer rate
|
||||||
|
- redirect stderr
|
||||||
|
- parallel transfers
|
||||||
|
|
||||||
|
## libcurl
|
||||||
|
|
||||||
|
- URL RFC 3986 syntax
|
||||||
|
- custom maximum download time
|
||||||
|
- custom least download speed acceptable
|
||||||
|
- custom output result after completion
|
||||||
|
- guesses protocol from hostname unless specified
|
||||||
|
- uses .netrc
|
||||||
|
- progress bar with time statistics while downloading
|
||||||
|
- "standard" proxy environment variables support
|
||||||
|
- compiles on win32 (reported builds on 70+ operating systems)
|
||||||
|
- selectable network interface for outgoing traffic
|
||||||
|
- IPv6 support on Unix and Windows
|
||||||
|
- happy eyeballs dual-stack connects
|
||||||
|
- persistent connections
|
||||||
|
- SOCKS 4 + 5 support, with or without local name resolving
|
||||||
|
- supports username and password in proxy environment variables
|
||||||
|
- operations through HTTP proxy "tunnel" (using CONNECT)
|
||||||
|
- replaceable memory functions (malloc, free, realloc, etc)
|
||||||
|
- asynchronous name resolving (6)
|
||||||
|
- both a push and a pull style interface
|
||||||
|
- international domain names (10)
|
||||||
|
|
||||||
|
## HTTP
|
||||||
|
|
||||||
|
- HTTP/0.9 responses are optionally accepted
|
||||||
|
- HTTP/1.0
|
||||||
|
- HTTP/1.1
|
||||||
|
- HTTP/2, including multiplexing and server push (5)
|
||||||
|
- GET
|
||||||
|
- PUT
|
||||||
|
- HEAD
|
||||||
|
- POST
|
||||||
|
- multipart formpost (RFC 1867-style)
|
||||||
|
- authentication: Basic, Digest, NTLM (9) and Negotiate (SPNEGO) (3)
|
||||||
|
to server and proxy
|
||||||
|
- resume (both GET and PUT)
|
||||||
|
- follow redirects
|
||||||
|
- maximum amount of redirects to follow
|
||||||
|
- custom HTTP request
|
||||||
|
- cookie get/send fully parsed
|
||||||
|
- reads/writes the Netscape cookie file format
|
||||||
|
- custom headers (replace/remove internally generated headers)
|
||||||
|
- custom user-agent string
|
||||||
|
- custom referrer string
|
||||||
|
- range
|
||||||
|
- proxy authentication
|
||||||
|
- time conditions
|
||||||
|
- via HTTP proxy, HTTPS proxy or SOCKS proxy
|
||||||
|
- retrieve file modification date
|
||||||
|
- Content-Encoding support for deflate and gzip
|
||||||
|
- "Transfer-Encoding: chunked" support in uploads
|
||||||
|
- automatic data compression (11)
|
||||||
|
|
||||||
|
## HTTPS (1)
|
||||||
|
|
||||||
|
- (all the HTTP features)
|
||||||
|
- HTTP/3 experimental support
|
||||||
|
- using client certificates
|
||||||
|
- verify server certificate
|
||||||
|
- via HTTP proxy, HTTPS proxy or SOCKS proxy
|
||||||
|
- select desired encryption
|
||||||
|
- select usage of a specific SSL version
|
||||||
|
|
||||||
|
## FTP
|
||||||
|
|
||||||
|
- download
|
||||||
|
- authentication
|
||||||
|
- Kerberos 5 (12)
|
||||||
|
- active/passive using PORT, EPRT, PASV or EPSV
|
||||||
|
- single file size information (compare to HTTP HEAD)
|
||||||
|
- 'type=' URL support
|
||||||
|
- directory listing
|
||||||
|
- directory listing names-only
|
||||||
|
- upload
|
||||||
|
- upload append
|
||||||
|
- upload via http-proxy as HTTP PUT
|
||||||
|
- download resume
|
||||||
|
- upload resume
|
||||||
|
- custom ftp commands (before and/or after the transfer)
|
||||||
|
- simple "range" support
|
||||||
|
- via HTTP proxy, HTTPS proxy or SOCKS proxy
|
||||||
|
- all operations can be tunneled through proxy
|
||||||
|
- customizable to retrieve file modification date
|
||||||
|
- no directory depth limit
|
||||||
|
|
||||||
|
## FTPS (1)
|
||||||
|
|
||||||
|
- implicit `ftps://` support that uses SSL on both connections
|
||||||
|
- explicit "AUTH TLS" and "AUTH SSL" usage to "upgrade" plain `ftp://`
|
||||||
|
connection to use SSL for both or one of the connections
|
||||||
|
|
||||||
|
## SCP (8)
|
||||||
|
|
||||||
|
- both password and public key auth
|
||||||
|
|
||||||
|
## SFTP (7)
|
||||||
|
|
||||||
|
- both password and public key auth
|
||||||
|
- with custom commands sent before/after the transfer
|
||||||
|
|
||||||
|
## TFTP
|
||||||
|
|
||||||
|
- download
|
||||||
|
- upload
|
||||||
|
|
||||||
|
## TELNET
|
||||||
|
|
||||||
|
- connection negotiation
|
||||||
|
- custom telnet options
|
||||||
|
- stdin/stdout I/O
|
||||||
|
|
||||||
|
## LDAP (2)
|
||||||
|
|
||||||
|
- full LDAP URL support
|
||||||
|
|
||||||
|
## DICT
|
||||||
|
|
||||||
|
- extended DICT URL support
|
||||||
|
|
||||||
|
## FILE
|
||||||
|
|
||||||
|
- URL support
|
||||||
|
- upload
|
||||||
|
- resume
|
||||||
|
|
||||||
|
## SMB
|
||||||
|
|
||||||
|
- SMBv1 over TCP and SSL
|
||||||
|
- download
|
||||||
|
- upload
|
||||||
|
- authentication with NTLMv1
|
||||||
|
|
||||||
|
## SMTP
|
||||||
|
|
||||||
|
- authentication: Plain, Login, CRAM-MD5, Digest-MD5, NTLM (9), Kerberos 5
|
||||||
|
(4) and External.
|
||||||
|
- send emails
|
||||||
|
- mail from support
|
||||||
|
- mail size support
|
||||||
|
- mail auth support for trusted server-to-server relaying
|
||||||
|
- multiple recipients
|
||||||
|
- via http-proxy
|
||||||
|
|
||||||
|
## SMTPS (1)
|
||||||
|
|
||||||
|
- implicit `smtps://` support
|
||||||
|
- explicit "STARTTLS" usage to "upgrade" plain `smtp://` connections to use SSL
|
||||||
|
- via http-proxy
|
||||||
|
|
||||||
|
## POP3
|
||||||
|
|
||||||
|
- authentication: Clear Text, APOP and SASL
|
||||||
|
- SASL based authentication: Plain, Login, CRAM-MD5, Digest-MD5, NTLM (9),
|
||||||
|
Kerberos 5 (4) and External.
|
||||||
|
- list emails
|
||||||
|
- retrieve emails
|
||||||
|
- enhanced command support for: CAPA, DELE, TOP, STAT, UIDL and NOOP via
|
||||||
|
custom requests
|
||||||
|
- via http-proxy
|
||||||
|
|
||||||
|
## POP3S (1)
|
||||||
|
|
||||||
|
- implicit `pop3s://` support
|
||||||
|
- explicit `STLS` usage to "upgrade" plain `pop3://` connections to use SSL
|
||||||
|
- via http-proxy
|
||||||
|
|
||||||
|
## IMAP
|
||||||
|
|
||||||
|
- authentication: Clear Text and SASL
|
||||||
|
- SASL based authentication: Plain, Login, CRAM-MD5, Digest-MD5, NTLM (9),
|
||||||
|
Kerberos 5 (4) and External.
|
||||||
|
- list the folders of a mailbox
|
||||||
|
- select a mailbox with support for verifying the `UIDVALIDITY`
|
||||||
|
- fetch emails with support for specifying the UID and SECTION
|
||||||
|
- upload emails via the append command
|
||||||
|
- enhanced command support for: EXAMINE, CREATE, DELETE, RENAME, STATUS,
|
||||||
|
STORE, COPY and UID via custom requests
|
||||||
|
- via http-proxy
|
||||||
|
|
||||||
|
## IMAPS (1)
|
||||||
|
|
||||||
|
- implicit `imaps://` support
|
||||||
|
- explicit "STARTTLS" usage to "upgrade" plain `imap://` connections to use SSL
|
||||||
|
- via http-proxy
|
||||||
|
|
||||||
|
## MQTT
|
||||||
|
|
||||||
|
- Subscribe to and publish topics using URL scheme `mqtt://broker/topic`
|
||||||
|
|
||||||
|
## Footnotes
|
||||||
|
|
||||||
|
1. requires a TLS library
|
||||||
|
2. requires OpenLDAP or WinLDAP
|
||||||
|
3. requires a GSS-API implementation (such as Heimdal or MIT Kerberos) or
|
||||||
|
SSPI (native Windows)
|
||||||
|
4. requires a GSS-API implementation, however, only Windows SSPI is
|
||||||
|
currently supported
|
||||||
|
5. requires nghttp2
|
||||||
|
6. requires c-ares
|
||||||
|
7. requires libssh2, libssh or wolfSSH
|
||||||
|
8. requires libssh2 or libssh
|
||||||
|
9. requires OpenSSL, GnuTLS, mbedTLS, Secure Transport or SSPI
|
||||||
|
(native Windows)
|
||||||
|
10. requires libidn2 or Windows
|
||||||
|
11. requires libz, brotli and/or zstd
|
||||||
|
12. requires a GSS-API implementation (such as Heimdal or MIT Kerberos)
|
202
src/dependencies/curl-8.8.0/docs/GOVERNANCE.md
Normal file
202
src/dependencies/curl-8.8.0/docs/GOVERNANCE.md
Normal file
|
@ -0,0 +1,202 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Decision making in the curl project
|
||||||
|
|
||||||
|
A rough guide to how we make decisions and who does what.
|
||||||
|
|
||||||
|
## BDFL
|
||||||
|
|
||||||
|
This project was started by and has to some extent been pushed forward over
|
||||||
|
the years with Daniel Stenberg as the driving force. It matches a standard
|
||||||
|
BDFL (Benevolent Dictator For Life) style project.
|
||||||
|
|
||||||
|
This setup has been used due to convenience and the fact that it has worked
|
||||||
|
fine this far. It is not because someone thinks of it as a superior project
|
||||||
|
leadership model. It also only works as long as Daniel manages to listen in to
|
||||||
|
what the project and the general user population wants and expects from us.
|
||||||
|
|
||||||
|
## Legal entity
|
||||||
|
|
||||||
|
There is no legal entity. The curl project is just a bunch of people scattered
|
||||||
|
around the globe with the common goal to produce source code that creates
|
||||||
|
great products. We are not part of any umbrella organization and we are not
|
||||||
|
located in any specific country. We are totally independent.
|
||||||
|
|
||||||
|
The copyrights in the project are owned by the individuals and organizations
|
||||||
|
that wrote those parts of the code.
|
||||||
|
|
||||||
|
## Decisions
|
||||||
|
|
||||||
|
The curl project is not a democracy, but everyone is entitled to state their
|
||||||
|
opinion and may argue for their sake within the community.
|
||||||
|
|
||||||
|
Any and all changes that have been made, or are being made, may be brought up
|
||||||
|
for discussion, to object to or to praise. Ideally, we find consensus for the
|
||||||
|
appropriate way forward in any given situation or challenge.
|
||||||
|
|
||||||
|
If there is no obvious consensus, a maintainer who's knowledgeable in the
|
||||||
|
specific area takes an "executive" decision that they think is right for
|
||||||
|
the project.
|
||||||
|
|
||||||
|
## Donations
|
||||||
|
|
||||||
|
Donating plain money to curl is best done to curl's [Open Collective
|
||||||
|
fund](https://opencollective.com/curl). Open Collective is a US based
|
||||||
|
non-profit organization that holds on to funds for us. This fund is then used
|
||||||
|
for paying the curl security bug bounties, to reimburse project related
|
||||||
|
expenses etc.
|
||||||
|
|
||||||
|
Donations to the project can also come in the form of server hosting, providing
|
||||||
|
services and paying for people to work on curl related code etc. Usually, such
|
||||||
|
donations are services paid for directly by the sponsors.
|
||||||
|
|
||||||
|
We grade sponsors in a few different levels and if they meet the criteria,
|
||||||
|
they can be mentioned on the Sponsors page on the curl website.
|
||||||
|
|
||||||
|
## Commercial Support
|
||||||
|
|
||||||
|
The curl project does not do or offer commercial support. It only hosts
|
||||||
|
mailing lists, runs bug trackers etc to facilitate communication and work.
|
||||||
|
|
||||||
|
However, Daniel works for wolfSSL and we offer commercial curl support there.
|
||||||
|
|
||||||
|
# Key roles
|
||||||
|
|
||||||
|
## User
|
||||||
|
|
||||||
|
Someone who uses or has used curl or libcurl.
|
||||||
|
|
||||||
|
## Contributor
|
||||||
|
|
||||||
|
Someone who has helped the curl project, who has contributed to bring it
|
||||||
|
forward. Contributing could be to provide advice, debug a problem, file a bug
|
||||||
|
report, run test infrastructure or write code etc.
|
||||||
|
|
||||||
|
## Commit author
|
||||||
|
|
||||||
|
Sometimes also called 'committer'. Someone who has authored a commit in the
|
||||||
|
curl source code repository. Committers are recorded as `Author` in git.
|
||||||
|
|
||||||
|
## Maintainers
|
||||||
|
|
||||||
|
A maintainer in the curl project is an individual who has been given
|
||||||
|
permissions to push commits to one of the git repositories.
|
||||||
|
|
||||||
|
Maintainers are free to push commits to the repositories as they see fit.
|
||||||
|
Maintainers are however expected to listen to feedback from users and any
|
||||||
|
change that is non-trivial in size or nature *should* be brought to the
|
||||||
|
project as a Pull-Request (PR) to allow others to comment/object before merge.
|
||||||
|
|
||||||
|
## Former maintainers
|
||||||
|
|
||||||
|
A maintainer who stops being active in the project gets their push permissions
|
||||||
|
removed at some point. We do this for security reasons but also to make sure
|
||||||
|
that we always have the list of maintainers as "the team that pushes stuff to
|
||||||
|
curl".
|
||||||
|
|
||||||
|
Getting push permissions removed is not a punishment. Everyone who ever worked
|
||||||
|
on maintaining curl is considered a hero, for all time hereafter.
|
||||||
|
|
||||||
|
## Security team members
|
||||||
|
|
||||||
|
We have a security team. That is the team of people who are subscribed to the
|
||||||
|
curl-security mailing list; the receivers of security reports from users and
|
||||||
|
developers. This list of people varies over time but they are all skilled
|
||||||
|
developers familiar with the curl project.
|
||||||
|
|
||||||
|
The security team works best when it consists of a small set of active
|
||||||
|
persons. We invite new members when the team seems to need it, and we also
|
||||||
|
expect to retire security team members as they "drift off" from the project or
|
||||||
|
just find themselves unable to perform their duties there.
|
||||||
|
|
||||||
|
## Core team
|
||||||
|
|
||||||
|
There is a curl core team. It currently has the same set of members as the
|
||||||
|
security team. It can also be reached on the security email address.
|
||||||
|
|
||||||
|
The core team nominates and invites new members to the team when it sees fit.
|
||||||
|
There is no open member voting or formal ways to be a candidate. Active
|
||||||
|
participants in the curl project who want to join the core team can ask to
|
||||||
|
join.
|
||||||
|
|
||||||
|
The core team is a board of advisors. It deals with project management
|
||||||
|
subjects that need confidentiality or for other reasons cannot be dealt with
|
||||||
|
and discussed in the open (for example reports of code of conduct violations).
|
||||||
|
Project matters should always as far as possible be discussed on open mailing
|
||||||
|
lists.
|
||||||
|
|
||||||
|
## Server admins
|
||||||
|
|
||||||
|
We run a web server, a mailing list and more on the curl project's primary
|
||||||
|
server. That physical machine is owned and run by Haxx. Daniel is the primary
|
||||||
|
admin of all curl-related server stuff, but Björn Stenberg and Linus
|
||||||
|
Feltzing serve as backup admins for when Daniel is gone or unable.
|
||||||
|
|
||||||
|
The primary server is paid for by Haxx. The machine is physically located in a
|
||||||
|
server bunker in Stockholm Sweden, operated by the company Glesys.
|
||||||
|
|
||||||
|
The website contents are served to the web via Fastly and Daniel is the
|
||||||
|
primary curl contact with Fastly.
|
||||||
|
|
||||||
|
## BDFL
|
||||||
|
|
||||||
|
That is Daniel.
|
||||||
|
|
||||||
|
# Maintainers
|
||||||
|
|
||||||
|
A curl maintainer is a project volunteer who has the authority and rights to
|
||||||
|
merge changes into a git repository in the curl project.
|
||||||
|
|
||||||
|
Anyone can aspire to become a curl maintainer.
|
||||||
|
|
||||||
|
### Duties
|
||||||
|
|
||||||
|
There are no mandatory duties. We hope and wish that maintainers consider
|
||||||
|
reviewing patches and help merging them, especially when the changes are
|
||||||
|
within the area of personal expertise and experience.
|
||||||
|
|
||||||
|
### Requirements
|
||||||
|
|
||||||
|
- only merge code that meets our quality and style guide requirements.
|
||||||
|
- *never* merge code without doing a PR first, unless the change is "trivial"
|
||||||
|
- if in doubt, ask for input/feedback from others
|
||||||
|
|
||||||
|
### Recommendations
|
||||||
|
|
||||||
|
- we require two-factor authentication enabled on your GitHub account to
|
||||||
|
reduce risk of malicious source code tampering
|
||||||
|
- consider enabling signed git commits for additional verification of changes
|
||||||
|
|
||||||
|
### Merge advice
|
||||||
|
|
||||||
|
When you are merging patches/pull requests...
|
||||||
|
|
||||||
|
- make sure the commit messages follow our template
|
||||||
|
- squash patch sets into a few logical commits even if the PR did not, if
|
||||||
|
necessary
|
||||||
|
- avoid the "merge" button on GitHub, do it "manually" instead to get full
|
||||||
|
control and full audit trail (GitHub otherwise leaves you out as "Committer:")
|
||||||
|
- remember to credit the reporter and the helpers.
|
||||||
|
|
||||||
|
## Who are maintainers?
|
||||||
|
|
||||||
|
The [list of maintainers](https://github.com/orgs/curl/people). Be aware that
|
||||||
|
the level of presence and activity in the project varies greatly between
|
||||||
|
different individuals and over time.
|
||||||
|
|
||||||
|
### Become a maintainer?
|
||||||
|
|
||||||
|
If you think you can help make the project better by shouldering some
|
||||||
|
maintaining responsibilities, then please get in touch.
|
||||||
|
|
||||||
|
You are expected to be familiar with the curl project and its ways of working.
|
||||||
|
You need to have gotten a few quality patches merged as a proof of this.
|
||||||
|
|
||||||
|
### Stop being a maintainer
|
||||||
|
|
||||||
|
If you no longer appear to be active in the project, you may be removed as
|
||||||
|
a maintainer. Thank you for your service.
|
94
src/dependencies/curl-8.8.0/docs/HELP-US.md
Normal file
94
src/dependencies/curl-8.8.0/docs/HELP-US.md
Normal file
|
@ -0,0 +1,94 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# How to get started helping out in the curl project
|
||||||
|
|
||||||
|
We are always in need of more help. If you are new to the project and are
|
||||||
|
looking for ways to contribute and help out, this document aims to give a few
|
||||||
|
good starting points.
|
||||||
|
|
||||||
|
You may subscribe to the [curl-library mailing
|
||||||
|
list](https://lists.haxx.se/listinfo/curl-library) to keep track of the
|
||||||
|
current discussion topics; or if you are registered on GitHub, you can use the
|
||||||
|
[Discussions section](https://github.com/curl/curl/discussions) on the main
|
||||||
|
curl repository.
|
||||||
|
|
||||||
|
## Scratch your own itch
|
||||||
|
|
||||||
|
One of the best ways is to start working on any problems or issues you have
|
||||||
|
found yourself or perhaps got annoyed at in the past. It can be a spelling
|
||||||
|
error in an error text or a weirdly phrased section in a man page. Hunt it
|
||||||
|
down and report the bug. Or make your first pull request with a fix for that.
|
||||||
|
|
||||||
|
## Smaller tasks
|
||||||
|
|
||||||
|
Some projects mark small issues as "beginner friendly", "bite-sized" or
|
||||||
|
similar. We do not do that in curl since such issues never linger around long
|
||||||
|
enough. Simple issues get handled fast.
|
||||||
|
|
||||||
|
If you are looking for a smaller or simpler task in the project to help out
|
||||||
|
with as an entry-point into the project, perhaps because you are a newcomer or
|
||||||
|
even maybe not a terribly experienced developer, here's our advice:
|
||||||
|
|
||||||
|
- Read through this document to get a grasp on a general approach to use
|
||||||
|
- Consider adding a test case for something not currently tested (correctly)
|
||||||
|
- Consider updating or adding documentation
|
||||||
|
- One way to get started gently in the project, is to participate in an
|
||||||
|
existing issue/PR and help out by reproducing the issue, review the code in
|
||||||
|
the PR etc.
|
||||||
|
|
||||||
|
## Help wanted
|
||||||
|
|
||||||
|
In the issue tracker we occasionally mark bugs with [help
|
||||||
|
wanted](https://github.com/curl/curl/labels/help%20wanted), as a sign that the
|
||||||
|
bug is acknowledged to exist and that there is nobody known to work on this
|
||||||
|
issue for the moment. Those are bugs that are fine to "grab" and provide a
|
||||||
|
pull request for. The complexity level of these of course varies, so pick one
|
||||||
|
that piques your interest.
|
||||||
|
|
||||||
|
## Work on known bugs
|
||||||
|
|
||||||
|
Some bugs are known and have not yet received attention and work enough to get
|
||||||
|
fixed. We collect such known existing flaws in the
|
||||||
|
[KNOWN_BUGS](https://curl.se/docs/knownbugs.html) page. Many of them link
|
||||||
|
to the original bug report with some additional details, but some may also
|
||||||
|
have aged a bit and may require some verification that the bug still exists in
|
||||||
|
the same way and that what was said about it in the past is still valid.
|
||||||
|
|
||||||
|
## Fix autobuild problems
|
||||||
|
|
||||||
|
On the [autobuilds page](https://curl.se/dev/builds.html) we show a
|
||||||
|
collection of test results from the automatic curl build and tests that are
|
||||||
|
performed by volunteers. Fixing compiler warnings and errors shown there is
|
||||||
|
something we value greatly. Also, if you own or run systems or architectures
|
||||||
|
that are not already tested in the autobuilds, we also appreciate more
|
||||||
|
volunteers running builds automatically to help us keep curl portable.
|
||||||
|
|
||||||
|
## TODO items
|
||||||
|
|
||||||
|
Ideas for features and functions that we have considered worthwhile to
|
||||||
|
implement and provide are kept in the
|
||||||
|
[TODO](https://curl.se/docs/todo.html) file. Some of the ideas are
|
||||||
|
rough. Some are well thought out. Some probably are not really suitable
|
||||||
|
anymore.
|
||||||
|
|
||||||
|
Before you invest a lot of time on a TODO item, do bring it up for discussion
|
||||||
|
on the mailing list, both regarding applicability and for ideas and
|
||||||
|
brainstorming on specific ways to do the implementation etc.
|
||||||
|
|
||||||
|
## You decide
|
||||||
|
|
||||||
|
You can also come up with a completely new thing you think we should do. Or
|
||||||
|
not do. Or fix. Or add to the project. You then either bring it to the mailing
|
||||||
|
list first to see if people shoot down the idea at once, or you bring a first
|
||||||
|
draft of the idea as a pull request and take the discussion there around the
|
||||||
|
specific implementation. Either way is fine.
|
||||||
|
|
||||||
|
## CONTRIBUTE
|
||||||
|
|
||||||
|
We offer [guidelines](https://curl.se/dev/contribute.html) that are suitable
|
||||||
|
to be familiar with before you decide to contribute to curl. If you are used
|
||||||
|
to open source development, you probably do not find many surprises there.
|
443
src/dependencies/curl-8.8.0/docs/HISTORY.md
Normal file
443
src/dependencies/curl-8.8.0/docs/HISTORY.md
Normal file
|
@ -0,0 +1,443 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
How curl Became Like This
|
||||||
|
=========================
|
||||||
|
|
||||||
|
Towards the end of 1996, Daniel Stenberg was spending time writing an IRC bot
|
||||||
|
for an Amiga related channel on EFnet. He then came up with the idea to make
|
||||||
|
currency-exchange calculations available to Internet Relay Chat (IRC)
|
||||||
|
users. All the necessary data were published on the Web; he just needed to
|
||||||
|
automate their retrieval.
|
||||||
|
|
||||||
|
1996
|
||||||
|
----
|
||||||
|
|
||||||
|
On November 11, 1996 the Brazilian developer Rafael Sagula wrote and released
|
||||||
|
HttpGet version 0.1.
|
||||||
|
|
||||||
|
Daniel extended this existing command-line open-source tool. After a few minor
|
||||||
|
adjustments, it did just what he needed. The first release with Daniel's
|
||||||
|
additions was 0.2, released on December 17, 1996. Daniel quickly became the
|
||||||
|
new maintainer of the project.
|
||||||
|
|
||||||
|
1997
|
||||||
|
----
|
||||||
|
|
||||||
|
HttpGet 0.3 was released in January 1997 and now it accepted HTTP URLs on the
|
||||||
|
command line.
|
||||||
|
|
||||||
|
HttpGet 1.0 was released on April 8 1997 with brand new HTTP proxy support.
|
||||||
|
|
||||||
|
We soon found and fixed support for getting currencies over GOPHER. Once FTP
|
||||||
|
download support was added, the name of the project was changed and urlget 2.0
|
||||||
|
was released in August 1997. The http-only days were already passed.
|
||||||
|
|
||||||
|
Version 2.2 was released on August 14 1997 and introduced support to build for
|
||||||
|
and run on Windows and Solaris.
|
||||||
|
|
||||||
|
November 24 1997: Version 3.1 added FTP upload support.
|
||||||
|
|
||||||
|
Version 3.5 added support for HTTP POST.
|
||||||
|
|
||||||
|
1998
|
||||||
|
----
|
||||||
|
|
||||||
|
February 4: urlget 3.10
|
||||||
|
|
||||||
|
February 9: urlget 3.11
|
||||||
|
|
||||||
|
March 14: urlget 3.12 added proxy authentication.
|
||||||
|
|
||||||
|
The project slowly grew bigger. With upload capabilities, the name was once
|
||||||
|
again misleading and a second name change was made. On March 20, 1998 curl 4
|
||||||
|
was released. (The version numbering from the previous names was kept.)
|
||||||
|
|
||||||
|
(Unrelated to this project a company called Curl Corporation registered a US
|
||||||
|
trademark on the name "CURL" on May 18 1998. That company had then already
|
||||||
|
registered the curl.com domain back in November of the previous year. All this
|
||||||
|
was revealed to us much later.)
|
||||||
|
|
||||||
|
SSL support was added, powered by the SSLeay library.
|
||||||
|
|
||||||
|
August: first announcement of curl on freshmeat.net.
|
||||||
|
|
||||||
|
October: with the curl 4.9 release and the introduction of cookie support,
|
||||||
|
curl was no longer released under the GPL license. Now at 4000 lines of
|
||||||
|
code, we switched over to the MPL license to restrict the effects of
|
||||||
|
"copyleft".
|
||||||
|
|
||||||
|
November: configure script and reported successful compiles on several
|
||||||
|
major operating systems. The never-quite-understood -F option was added and
|
||||||
|
curl could now simulate quite a lot of a browser. TELNET support was added.
|
||||||
|
|
||||||
|
Curl 5 was released in December 1998 and introduced the first ever curl man
|
||||||
|
page. People started making Linux RPM packages out of it.
|
||||||
|
|
||||||
|
1999
|
||||||
|
----
|
||||||
|
|
||||||
|
January: DICT support added.
|
||||||
|
|
||||||
|
OpenSSL took over and SSLeay was abandoned.
|
||||||
|
|
||||||
|
May: first Debian package.
|
||||||
|
|
||||||
|
August: LDAP:// and FILE:// support added. The curl website gets 1300 visits
|
||||||
|
weekly. Moved site to curl.haxx.nu.
|
||||||
|
|
||||||
|
September: Released curl 6.0. 15000 lines of code.
|
||||||
|
|
||||||
|
December 28: added the project on Sourceforge and started using its services
|
||||||
|
for managing the project.
|
||||||
|
|
||||||
|
2000
|
||||||
|
----
|
||||||
|
|
||||||
|
Spring: major internal overhaul to provide a suitable library interface.
|
||||||
|
The first non-beta release was named 7.1 and arrived in August. This offered
|
||||||
|
the easy interface and turned out to be the beginning of actually getting
|
||||||
|
other software and programs to be based on and powered by libcurl. Almost
|
||||||
|
20000 lines of code.
|
||||||
|
|
||||||
|
June: the curl site moves to "curl.haxx.se"
|
||||||
|
|
||||||
|
August, the curl website gets 4000 visits weekly.
|
||||||
|
|
||||||
|
The PHP guys adopted libcurl already the same month, when the first ever third
|
||||||
|
party libcurl binding showed up. CURL has been a supported module in PHP since
|
||||||
|
the release of PHP 4.0.2. This would soon get followers. More than 16
|
||||||
|
different bindings exist at the time of this writing.
|
||||||
|
|
||||||
|
September: kerberos4 support was added.
|
||||||
|
|
||||||
|
November: started the work on a test suite for curl. It was later re-written
|
||||||
|
from scratch again. The libcurl major SONAME number was set to 1.
|
||||||
|
|
||||||
|
2001
|
||||||
|
----
|
||||||
|
|
||||||
|
January: Daniel released curl 7.5.2 under a new license again: MIT (or
|
||||||
|
MPL). The MIT license is extremely liberal and can be combined with GPL
|
||||||
|
in other projects. This would finally put an end to the "complaints" from
|
||||||
|
people involved in GPLed projects that previously were prohibited from using
|
||||||
|
libcurl while it was released under MPL only. (Due to the fact that MPL is
|
||||||
|
deemed "GPL incompatible".)
|
||||||
|
|
||||||
|
March 22: curl supports HTTP 1.1 starting with the release of 7.7. This
|
||||||
|
also introduced libcurl's ability to do persistent connections. 24000 lines of
|
||||||
|
code. The libcurl major SONAME number was bumped to 2 due to this overhaul.
|
||||||
|
The first experimental ftps:// support was added.
|
||||||
|
|
||||||
|
August: The curl website gets 8000 visits weekly. Curl Corporation contacted
|
||||||
|
Daniel to discuss "the name issue". After Daniel's reply, they have never
|
||||||
|
since got back in touch again.
|
||||||
|
|
||||||
|
September: libcurl 7.9 introduces cookie jar and `curl_formadd()`. During the
|
||||||
|
forthcoming 7.9.x releases, we introduced the multi interface slowly and
|
||||||
|
without many whistles.
|
||||||
|
|
||||||
|
September 25: curl (7.7.2) is bundled in Mac OS X (10.1) for the first time. It was
|
||||||
|
already becoming more and more of a standard utility of Linux distributions
|
||||||
|
and a regular in the BSD ports collections.
|
||||||
|
|
||||||
|
2002
|
||||||
|
----
|
||||||
|
|
||||||
|
June: the curl website gets 13000 visits weekly. curl and libcurl is
|
||||||
|
35000 lines of code. Reported successful compiles on more than 40 combinations
|
||||||
|
of CPUs and operating systems.
|
||||||
|
|
||||||
|
To estimate the number of users of the curl tool or libcurl library is next to
|
||||||
|
impossible. Around 5000 downloaded packages each week from the main site gives
|
||||||
|
a hint, but the packages are mirrored extensively, bundled with numerous OS
|
||||||
|
distributions and otherwise retrieved as part of other software.
|
||||||
|
|
||||||
|
October 1: with the release of curl 7.10 it is released under the MIT license
|
||||||
|
only.
|
||||||
|
|
||||||
|
Starting with 7.10, curl verifies SSL server certificates by default.
|
||||||
|
|
||||||
|
2003
|
||||||
|
----
|
||||||
|
|
||||||
|
January: Started working on the distributed curl tests. The autobuilds.
|
||||||
|
|
||||||
|
February: the curl site averages at 20000 visits weekly. At any given moment,
|
||||||
|
there is an average of 3 people browsing the website.
|
||||||
|
|
||||||
|
Multiple new authentication schemes are supported: Digest (May), NTLM (June)
|
||||||
|
and Negotiate (June).
|
||||||
|
|
||||||
|
November: curl 7.10.8 is released. 45000 lines of code. ~55000 unique visitors
|
||||||
|
to the website. Five official web mirrors.
|
||||||
|
|
||||||
|
December: full-fledged SSL for FTP is supported.
|
||||||
|
|
||||||
|
2004
|
||||||
|
----
|
||||||
|
|
||||||
|
January: curl 7.11.0 introduced large file support.
|
||||||
|
|
||||||
|
June: curl 7.12.0 introduced IDN support. 10 official web mirrors.
|
||||||
|
|
||||||
|
This release bumped the major SONAME to 3 due to the removal of the
|
||||||
|
`curl_formparse()` function
|
||||||
|
|
||||||
|
August: Curl and libcurl 7.12.1
|
||||||
|
|
||||||
|
Public curl release number: 82
|
||||||
|
Releases counted from the beginning: 109
|
||||||
|
Available command line options: 96
|
||||||
|
Available curl_easy_setopt() options: 120
|
||||||
|
Number of public functions in libcurl: 36
|
||||||
|
Amount of public website mirrors: 12
|
||||||
|
Number of known libcurl bindings: 26
|
||||||
|
|
||||||
|
2005
|
||||||
|
----
|
||||||
|
|
||||||
|
April: GnuTLS can now optionally be used for the secure layer when curl is
|
||||||
|
built.
|
||||||
|
|
||||||
|
April: Added the multi_socket() API
|
||||||
|
|
||||||
|
September: TFTP support was added.
|
||||||
|
|
||||||
|
More than 100,000 unique visitors of the curl website. 25 mirrors.
|
||||||
|
|
||||||
|
December: security vulnerability: libcurl URL Buffer Overflow
|
||||||
|
|
||||||
|
2006
|
||||||
|
----
|
||||||
|
|
||||||
|
January: We dropped support for Gopher. We found bugs in the implementation
|
||||||
|
that turned out to have been introduced years ago, so with the conclusion that
|
||||||
|
nobody had noticed in all this time, we removed it instead of fixing it.
|
||||||
|
|
||||||
|
March: security vulnerability: libcurl TFTP Packet Buffer Overflow
|
||||||
|
|
||||||
|
September: The major SONAME number for libcurl was bumped to 4 due to the
|
||||||
|
removal of ftp third party transfer support.
|
||||||
|
|
||||||
|
November: Added SCP and SFTP support
|
||||||
|
|
||||||
|
2007
|
||||||
|
----
|
||||||
|
|
||||||
|
February: Added support for the Mozilla NSS library to do the SSL/TLS stuff
|
||||||
|
|
||||||
|
July: security vulnerability: libcurl GnuTLS insufficient cert verification
|
||||||
|
|
||||||
|
2008
|
||||||
|
----
|
||||||
|
|
||||||
|
November:
|
||||||
|
|
||||||
|
Command line options: 128
|
||||||
|
curl_easy_setopt() options: 158
|
||||||
|
Public functions in libcurl: 58
|
||||||
|
Known libcurl bindings: 37
|
||||||
|
Contributors: 683
|
||||||
|
|
||||||
|
145,000 unique visitors. >100 GB downloaded.
|
||||||
|
|
||||||
|
2009
|
||||||
|
----
|
||||||
|
|
||||||
|
March: security vulnerability: libcurl Arbitrary File Access
|
||||||
|
|
||||||
|
April: added CMake support
|
||||||
|
|
||||||
|
August: security vulnerability: libcurl embedded zero in cert name
|
||||||
|
|
||||||
|
December: Added support for IMAP, POP3 and SMTP
|
||||||
|
|
||||||
|
2010
|
||||||
|
----
|
||||||
|
|
||||||
|
January: Added support for RTSP
|
||||||
|
|
||||||
|
February: security vulnerability: libcurl data callback excessive length
|
||||||
|
|
||||||
|
March: The project switched over to use git (hosted by GitHub) instead of CVS
|
||||||
|
for source code control
|
||||||
|
|
||||||
|
May: Added support for RTMP
|
||||||
|
|
||||||
|
Added support for PolarSSL to do the SSL/TLS stuff
|
||||||
|
|
||||||
|
August:
|
||||||
|
|
||||||
|
Public curl releases: 117
|
||||||
|
Command line options: 138
|
||||||
|
curl_easy_setopt() options: 180
|
||||||
|
Public functions in libcurl: 58
|
||||||
|
Known libcurl bindings: 39
|
||||||
|
Contributors: 808
|
||||||
|
|
||||||
|
Gopher support added (re-added actually, see January 2006)
|
||||||
|
|
||||||
|
2011
|
||||||
|
----
|
||||||
|
|
||||||
|
February: added support for the axTLS backend
|
||||||
|
|
||||||
|
April: added the cyassl backend (later renamed to WolfSSL)
|
||||||
|
|
||||||
|
2012
|
||||||
|
----
|
||||||
|
|
||||||
|
July: Added support for Schannel (native Windows TLS backend) and Darwin SSL
|
||||||
|
(Native Mac OS X and iOS TLS backend).
|
||||||
|
|
||||||
|
Supports Metalink
|
||||||
|
|
||||||
|
October: SSH-agent support.
|
||||||
|
|
||||||
|
2013
|
||||||
|
----
|
||||||
|
|
||||||
|
February: Cleaned up internals to always use the "multi" non-blocking
|
||||||
|
approach internally and only expose the blocking API with a wrapper.
|
||||||
|
|
||||||
|
September: First small steps on supporting HTTP/2 with nghttp2.
|
||||||
|
|
||||||
|
October: Removed krb4 support.
|
||||||
|
|
||||||
|
December: Happy eyeballs.
|
||||||
|
|
||||||
|
2014
|
||||||
|
----
|
||||||
|
|
||||||
|
March: first real release supporting HTTP/2
|
||||||
|
|
||||||
|
September: Website had 245,000 unique visitors and served 236GB data
|
||||||
|
|
||||||
|
SMB and SMBS support
|
||||||
|
|
||||||
|
2015
|
||||||
|
----
|
||||||
|
|
||||||
|
June: support for multiplexing with HTTP/2
|
||||||
|
|
||||||
|
August: support for HTTP/2 server push
|
||||||
|
|
||||||
|
December: Public Suffix List
|
||||||
|
|
||||||
|
2016
|
||||||
|
----
|
||||||
|
|
||||||
|
January: the curl tool defaults to HTTP/2 for HTTPS URLs
|
||||||
|
|
||||||
|
December: curl 7.52.0 introduced support for HTTPS-proxy
|
||||||
|
|
||||||
|
First TLS 1.3 support
|
||||||
|
|
||||||
|
2017
|
||||||
|
----
|
||||||
|
|
||||||
|
July: OSS-Fuzz started fuzzing libcurl
|
||||||
|
|
||||||
|
September: Added Multi-SSL support
|
||||||
|
|
||||||
|
The website serves 3100 GB/month
|
||||||
|
|
||||||
|
Public curl releases: 169
|
||||||
|
Command line options: 211
|
||||||
|
curl_easy_setopt() options: 249
|
||||||
|
Public functions in libcurl: 74
|
||||||
|
Contributors: 1609
|
||||||
|
|
||||||
|
October: SSLKEYLOGFILE support, new MIME API
|
||||||
|
|
||||||
|
October: Daniel received the Polhem Prize for his work on curl
|
||||||
|
|
||||||
|
November: brotli
|
||||||
|
|
||||||
|
2018
|
||||||
|
----
|
||||||
|
|
||||||
|
January: new SSH backend powered by libssh
|
||||||
|
|
||||||
|
March: starting with the 1803 release of Windows 10, curl is shipped bundled
|
||||||
|
with Microsoft's operating system.
|
||||||
|
|
||||||
|
July: curl shows headers using bold type face
|
||||||
|
|
||||||
|
October: added DNS-over-HTTPS (DoH) and the URL API
|
||||||
|
|
||||||
|
MesaLink is a new supported TLS backend
|
||||||
|
|
||||||
|
libcurl now does HTTP/2 (and multiplexing) by default on HTTPS URLs
|
||||||
|
|
||||||
|
curl and libcurl are installed in an estimated 5 *billion* instances
|
||||||
|
world-wide.
|
||||||
|
|
||||||
|
October 31: Curl and libcurl 7.62.0
|
||||||
|
|
||||||
|
Public curl releases: 177
|
||||||
|
Command line options: 219
|
||||||
|
curl_easy_setopt() options: 261
|
||||||
|
Public functions in libcurl: 80
|
||||||
|
Contributors: 1808
|
||||||
|
|
||||||
|
December: removed axTLS support
|
||||||
|
|
||||||
|
2019
|
||||||
|
----
|
||||||
|
|
||||||
|
March: added experimental alt-svc support
|
||||||
|
|
||||||
|
August: the first HTTP/3 requests with curl.
|
||||||
|
|
||||||
|
September: 7.66.0 is released and the tool offers parallel downloads
|
||||||
|
|
||||||
|
2020
|
||||||
|
----
|
||||||
|
|
||||||
|
curl and libcurl are installed in an estimated 10 *billion* instances
|
||||||
|
world-wide.
|
||||||
|
|
||||||
|
January: added BearSSL support
|
||||||
|
|
||||||
|
March: removed support for PolarSSL, added wolfSSH support
|
||||||
|
|
||||||
|
April: experimental MQTT support
|
||||||
|
|
||||||
|
August: zstd support
|
||||||
|
|
||||||
|
November: the website moves to curl.se. The website serves 10TB data monthly.
|
||||||
|
|
||||||
|
December: alt-svc support
|
||||||
|
|
||||||
|
2021
|
||||||
|
----
|
||||||
|
|
||||||
|
February 3: curl 7.75.0 ships with support for Hyper as an HTTP backend
|
||||||
|
|
||||||
|
March 31: curl 7.76.0 ships with support for rustls
|
||||||
|
|
||||||
|
July: HSTS is supported
|
||||||
|
|
||||||
|
2022
|
||||||
|
----
|
||||||
|
|
||||||
|
March: added --json, removed mesalink support
|
||||||
|
|
||||||
|
Public curl releases: 206
|
||||||
|
Command line options: 245
|
||||||
|
curl_easy_setopt() options: 295
|
||||||
|
Public functions in libcurl: 86
|
||||||
|
Contributors: 2601
|
||||||
|
|
||||||
|
The curl.se website serves 16,500 GB/month over 462M requests, the
|
||||||
|
official docker image has been pulled 4,098,015,431 times.
|
||||||
|
|
||||||
|
2023
|
||||||
|
----
|
||||||
|
|
||||||
|
August: Dropped support for the NSS library
|
48
src/dependencies/curl-8.8.0/docs/HSTS.md
Normal file
48
src/dependencies/curl-8.8.0/docs/HSTS.md
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# HSTS support
|
||||||
|
|
||||||
|
HTTP Strict-Transport-Security. Added as experimental in curl
|
||||||
|
7.74.0. Supported "for real" since 7.77.0.
|
||||||
|
|
||||||
|
## Standard
|
||||||
|
|
||||||
|
[HTTP Strict Transport Security](https://datatracker.ietf.org/doc/html/rfc6797)
|
||||||
|
|
||||||
|
## Behavior
|
||||||
|
|
||||||
|
libcurl features an in-memory cache for HSTS hosts, so that subsequent
|
||||||
|
HTTP-only requests to a hostname present in the cache get internally
|
||||||
|
"redirected" to the HTTPS version.
|
||||||
|
|
||||||
|
## `curl_easy_setopt()` options:
|
||||||
|
|
||||||
|
- `CURLOPT_HSTS_CTRL` - enable HSTS for this easy handle
|
||||||
|
- `CURLOPT_HSTS` - specify filename where to store the HSTS cache on close
|
||||||
|
(and possibly read from at startup)
|
||||||
|
|
||||||
|
## curl command line options
|
||||||
|
|
||||||
|
- `--hsts [filename]` - enable HSTS, use the file as HSTS cache. If filename
|
||||||
|
is `""` (no length) then no file is used, only in-memory cache.
|
||||||
|
|
||||||
|
## HSTS cache file format
|
||||||
|
|
||||||
|
Lines starting with `#` are ignored.
|
||||||
|
|
||||||
|
For each hsts entry:
|
||||||
|
|
||||||
|
[host name] "YYYYMMDD HH:MM:SS"
|
||||||
|
|
||||||
|
The `[host name]` is dot-prefixed if it includes subdomains.
|
||||||
|
|
||||||
|
The time stamp is when the entry expires.
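
A hypothetical cache entry for a host (including its subdomains, hence the
leading dot) that expires at the end of 2030 could look like:

    .example.com "20301231 23:59:59"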
|
||||||
|
|
||||||
|
## Possible future additions
|
||||||
|
|
||||||
|
- `CURLOPT_HSTS_PRELOAD` - provide a set of HSTS hostnames to load first
|
||||||
|
- ability to save to something else than a file
|
171
src/dependencies/curl-8.8.0/docs/HTTP-COOKIES.md
Normal file
171
src/dependencies/curl-8.8.0/docs/HTTP-COOKIES.md
Normal file
|
@ -0,0 +1,171 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# HTTP Cookies
|
||||||
|
|
||||||
|
## Cookie overview
|
||||||
|
|
||||||
|
Cookies are `name=contents` pairs that an HTTP server tells the client to
|
||||||
|
hold and then the client sends back those to the server on subsequent
|
||||||
|
requests to the same domains and paths for which the cookies were set.
|
||||||
|
|
||||||
|
Cookies are either "session cookies", which typically are forgotten when the
session is over (often taken to mean when the browser quits), or cookies with
an expiration date, after which the client throws them away.
|
||||||
|
|
||||||
|
Cookies are set to the client with the Set-Cookie: header and are sent to
|
||||||
|
servers with the Cookie: header.
|
||||||
|
|
||||||
|
For a long time, the only spec explaining how to use cookies was the
|
||||||
|
original [Netscape spec from 1994](https://curl.se/rfc/cookie_spec.html).
|
||||||
|
|
||||||
|
In 2011, [RFC 6265](https://www.ietf.org/rfc/rfc6265.txt) was finally
|
||||||
|
published and details how cookies work within HTTP. In 2016, an update which
|
||||||
|
added support for prefixes was
|
||||||
|
[proposed](https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00),
|
||||||
|
and in 2017, another update was
|
||||||
|
[drafted](https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-alone-01)
|
||||||
|
to deprecate modification of 'secure' cookies from non-secure origins. Both
|
||||||
|
of these drafts have been incorporated into a proposal to
|
||||||
|
[replace](https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis-11)
|
||||||
|
RFC 6265. Cookie prefixes and secure cookie modification protection have been
|
||||||
|
implemented by curl.
|
||||||
|
|
||||||
|
curl considers `http://localhost` to be a *secure context*, meaning that it
|
||||||
|
allows and uses cookies marked with the `secure` keyword even when done over
|
||||||
|
plain HTTP for this host. curl does this to match how popular browsers work
|
||||||
|
with secure cookies.
|
||||||
|
|
||||||
|
## Super cookies
|
||||||
|
|
||||||
|
A single cookie can be set for a domain that matches multiple hosts. Like if
|
||||||
|
set for `example.com` it gets sent to both `aa.example.com` and
|
||||||
|
`bb.example.com`.
|
||||||
|
|
||||||
|
A challenge with this concept is that there are certain domains for which
|
||||||
|
cookies should not be allowed at all, because they are *Public
|
||||||
|
Suffixes*. Similarly, a client never accepts cookies set directly for the
|
||||||
|
top-level domain like for example `.com`. Cookies set for *too broad*
|
||||||
|
domains are generally referred to as *super cookies*.
|
||||||
|
|
||||||
|
If curl is built with PSL (**Public Suffix List**) support, it detects and
|
||||||
|
discards cookies that are specified for such suffix domains that should not
|
||||||
|
be allowed to have cookies.
|
||||||
|
|
||||||
|
If curl is *not* built with PSL support, it has no ability to stop super
|
||||||
|
cookies.
|
||||||
|
|
||||||
|
## Cookies saved to disk
|
||||||
|
|
||||||
|
Netscape once created a file format for storing cookies on disk so that they
|
||||||
|
would survive browser restarts. curl adopted that file format to allow
|
||||||
|
sharing the cookies with browsers, only to see browsers move away from that
|
||||||
|
format. Modern browsers no longer use it, while curl still does.
|
||||||
|
|
||||||
|
The Netscape cookie file format stores one cookie per physical line in the
|
||||||
|
file with a bunch of associated meta data, each field separated with
|
||||||
|
TAB. That file is called the cookie jar in curl terminology.
|
||||||
|
|
||||||
|
When libcurl saves a cookie jar, it creates a file header of its own in
|
||||||
|
which there is a URL mention that links to the web version of this document.
|
||||||
|
|
||||||
|
## Cookie file format
|
||||||
|
|
||||||
|
The cookie file format is text based and stores one cookie per line. Lines
|
||||||
|
that start with `#` are treated as comments. An exception is lines that
|
||||||
|
start with `#HttpOnly_`, which is a prefix for cookies that have the
|
||||||
|
`HttpOnly` attribute set.
|
||||||
|
|
||||||
|
Each line that specifies a single cookie consists of seven text fields
|
||||||
|
separated with TAB characters. A valid line must end with a newline
|
||||||
|
character.
|
||||||
|
|
||||||
|
### Fields in the file
|
||||||
|
|
||||||
|
Field number, what type and example data and the meaning of it (a complete example line follows the list):
|
||||||
|
|
||||||
|
0. string `example.com` - the domain name
|
||||||
|
1. boolean `FALSE` - include subdomains
|
||||||
|
2. string `/foobar/` - path
|
||||||
|
3. boolean `TRUE` - send/receive over HTTPS only
|
||||||
|
4. number `1462299217` - expires at - seconds since Jan 1st 1970, or 0
|
||||||
|
5. string `person` - name of the cookie
|
||||||
|
6. string `daniel` - value of the cookie
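
Putting the example field values above together, one complete (hypothetical)
cookie line looks like this, with the fields separated by TAB characters
(shown here as plain whitespace):

    example.com	FALSE	/foobar/	TRUE	1462299217	person	daniel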
|
||||||
|
|
||||||
|
## Cookies with curl the command line tool
|
||||||
|
|
||||||
|
curl has a full cookie "engine" built in. If you just activate it, you can
|
||||||
|
have curl receive and send cookies exactly as mandated in the specs.
|
||||||
|
|
||||||
|
Command line options:
|
||||||
|
|
||||||
|
`-b, --cookie`
|
||||||
|
|
||||||
|
tell curl a file to read cookies from and start the cookie engine, or if it
|
||||||
|
is not a file it passes on the given string. `-b name=var` works and so does
|
||||||
|
`-b cookiefile`.
|
||||||
|
|
||||||
|
`-j, --junk-session-cookies`
|
||||||
|
|
||||||
|
when used in combination with -b, it skips all "session cookies" on load so
|
||||||
|
as to appear to start a new cookie session.
|
||||||
|
|
||||||
|
`-c, --cookie-jar`
|
||||||
|
|
||||||
|
tell curl to start the cookie engine and write cookies to the given file
|
||||||
|
after the request(s)
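
A short sketch combining these options (filenames and URL are examples):

```bash
# Read cookies from the jar, drop session cookies on load (-j), and write
# all cookies known after the transfer back to the same file (-c)
curl -b cookies.txt -j -c cookies.txt https://example.com/
```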
|
||||||
|
|
||||||
|
## Cookies with libcurl
|
||||||
|
|
||||||
|
libcurl offers several ways to enable and interface the cookie engine. These
|
||||||
|
options are the ones provided by the native API. libcurl bindings may offer
|
||||||
|
access to them using other means.
|
||||||
|
|
||||||
|
`CURLOPT_COOKIE`
|
||||||
|
|
||||||
|
Is used when you want to specify the exact contents of a cookie header to
|
||||||
|
send to the server.
|
||||||
|
|
||||||
|
`CURLOPT_COOKIEFILE`
|
||||||
|
|
||||||
|
Tell libcurl to activate the cookie engine, and to read the initial set of
|
||||||
|
cookies from the given file. Read-only.
|
||||||
|
|
||||||
|
`CURLOPT_COOKIEJAR`
|
||||||
|
|
||||||
|
Tell libcurl to activate the cookie engine, and when the easy handle is
|
||||||
|
closed save all known cookies to the given cookie jar file. Write-only.
|
||||||
|
|
||||||
|
`CURLOPT_COOKIELIST`
|
||||||
|
|
||||||
|
Provide detailed information about a single cookie to add to the internal
|
||||||
|
storage of cookies. Pass in the cookie as an HTTP header with all the
|
||||||
|
details set, or pass in a line from a Netscape cookie file. This option can
|
||||||
|
also be used to flush the cookies etc.
|
||||||
|
|
||||||
|
`CURLOPT_COOKIESESSION`
|
||||||
|
|
||||||
|
Tell libcurl to ignore all cookies it is about to load that are session
|
||||||
|
cookies.
|
||||||
|
|
||||||
|
`CURLINFO_COOKIELIST`
|
||||||
|
|
||||||
|
Extract cookie information from the internal cookie storage as a linked
|
||||||
|
list.
|
||||||
|
|
||||||
|
## Cookies with JavaScript
|
||||||
|
|
||||||
|
These days a lot of the web is built up by JavaScript. The web browser loads
|
||||||
|
complete programs that render the page you see. These JavaScript programs
|
||||||
|
can also set and access cookies.
|
||||||
|
|
||||||
|
Since curl and libcurl are plain HTTP clients without any knowledge of or
|
||||||
|
capability to handle JavaScript, such cookies are not detected or used.
|
||||||
|
|
||||||
|
Often, if you want to mimic what a browser does on such websites, you can
|
||||||
|
record web browser HTTP traffic when using such a site and then repeat the
|
||||||
|
cookie operations using curl or libcurl.
|
108
src/dependencies/curl-8.8.0/docs/HTTP2.md
Normal file
108
src/dependencies/curl-8.8.0/docs/HTTP2.md
Normal file
|
@ -0,0 +1,108 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
HTTP/2 with curl
|
||||||
|
================
|
||||||
|
|
||||||
|
[HTTP/2 Spec](https://www.rfc-editor.org/rfc/rfc7540.txt)
|
||||||
|
[http2 explained](https://daniel.haxx.se/http2/)
|
||||||
|
|
||||||
|
Build prerequisites
|
||||||
|
-------------------
|
||||||
|
- nghttp2
|
||||||
|
- OpenSSL, libressl, BoringSSL, GnuTLS, mbedTLS, wolfSSL or Schannel
|
||||||
|
with a new enough version.
|
||||||
|
|
||||||
|
[nghttp2](https://nghttp2.org/)
|
||||||
|
-------------------------------
|
||||||
|
|
||||||
|
libcurl uses this 3rd party library for the low level protocol handling
|
||||||
|
parts. The reason for this is that HTTP/2 is much more complex at that layer
|
||||||
|
than HTTP/1.1 (which we implement on our own) and that nghttp2 is an already
|
||||||
|
existing and well-functioning library.
|
||||||
|
|
||||||
|
We require at least version 1.12.0.
|
||||||
|
|
||||||
|
Over an http:// URL
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl includes
|
||||||
|
an upgrade header in the initial request to the host to allow upgrading to
|
||||||
|
HTTP/2.
|
||||||
|
|
||||||
|
Possibly we can later introduce an option that causes libcurl to fail if it is
|
||||||
|
not possible to upgrade. Possibly we introduce an option that makes libcurl
|
||||||
|
use HTTP/2 at once over http://
|
||||||
|
|
||||||
|
Over an https:// URL
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl uses ALPN
|
||||||
|
to negotiate which protocol to continue with. Possibly introduce an option
|
||||||
|
that causes libcurl to fail if not possible to use HTTP/2.
|
||||||
|
|
||||||
|
`CURL_HTTP_VERSION_2TLS` was added in 7.47.0 as a way to ask libcurl to prefer
|
||||||
|
HTTP/2 for HTTPS but stick to 1.1 by default for plain old HTTP connections.
|
||||||
|
|
||||||
|
ALPN is the TLS extension that HTTP/2 is expected to use.
|
||||||
|
|
||||||
|
`CURLOPT_SSL_ENABLE_ALPN` is offered to allow applications to explicitly
|
||||||
|
disable ALPN.
|
||||||
|
|
||||||
|
Multiplexing
|
||||||
|
------------
|
||||||
|
|
||||||
|
Starting in 7.43.0, libcurl fully supports HTTP/2 multiplexing, which is the
|
||||||
|
term for doing multiple independent transfers over the same physical TCP
|
||||||
|
connection.
|
||||||
|
|
||||||
|
To take advantage of multiplexing, you need to use the multi interface and set
|
||||||
|
`CURLMOPT_PIPELINING` to `CURLPIPE_MULTIPLEX`. With that bit set, libcurl
|
||||||
|
attempts to reuse existing HTTP/2 connections and just add a new stream over
|
||||||
|
that when doing subsequent parallel requests.
|
||||||
|
|
||||||
|
While libcurl sets up a connection to an HTTP server there is a period during
|
||||||
|
which it does not know if it can pipeline or do multiplexing and if you add
|
||||||
|
new transfers in that period, libcurl defaults to starting new connections for
|
||||||
|
those transfers. With the new option `CURLOPT_PIPEWAIT` (added in 7.43.0), you
|
||||||
|
can ask that a transfer should rather wait and see in case there is a
|
||||||
|
connection for the same host in progress that might end up being possible to
|
||||||
|
multiplex on. It favors keeping the number of connections low at the cost of a
|
||||||
|
slightly longer time to first byte transferred.
|
||||||
|
|
||||||
|
Applications
|
||||||
|
------------
|
||||||
|
|
||||||
|
We hide HTTP/2's binary nature and convert received HTTP/2 traffic to headers
|
||||||
|
in HTTP 1.1 style. This allows applications to work unmodified.
|
||||||
|
|
||||||
|
curl tool
|
||||||
|
---------
|
||||||
|
|
||||||
|
curl offers the `--http2` command line option to enable use of HTTP/2.
|
||||||
|
|
||||||
|
curl offers the `--http2-prior-knowledge` command line option to enable use of
|
||||||
|
HTTP/2 without HTTP/1.1 Upgrade.
|
||||||
|
|
||||||
|
Since 7.47.0, the curl tool enables HTTP/2 by default for HTTPS connections.
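
For illustration, with a placeholder URL:

```bash
# Ask to upgrade a plain http:// request to HTTP/2
curl --http2 http://example.com/

# Talk HTTP/2 from the start, without the HTTP/1.1 Upgrade step
curl --http2-prior-knowledge http://example.com/
```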
|
||||||
|
|
||||||
|
curl tool limitations
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
The command line tool does not support HTTP/2 server push. It supports
|
||||||
|
multiplexing when the parallel transfer option is used.
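
A sketch of that; the URLs are placeholders and the server must speak HTTP/2
for the transfers to share one connection:

```bash
# -Z/--parallel runs the transfers concurrently; each -O saves one transfer
# under its remote file name
curl -Z -O -O https://example.com/one.html https://example.com/two.html
```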
|
||||||
|
|
||||||
|
HTTP Alternative Services
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
Alt-Svc is an extension with a corresponding frame (ALTSVC) in HTTP/2 that
|
||||||
|
tells the client about an alternative "route" to the same content for the same
|
||||||
|
origin server that you get the response from. A browser or long-living client
|
||||||
|
can use that hint to create a new connection asynchronously. For libcurl, we
|
||||||
|
may introduce a way to bring such clues to the application and/or let a
|
||||||
|
subsequent request use the alternate route automatically.
|
||||||
|
|
||||||
|
[Detailed in RFC 7838](https://datatracker.ietf.org/doc/html/rfc7838)
|
436
src/dependencies/curl-8.8.0/docs/HTTP3.md
Normal file
@@ -0,0 +1,436 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# HTTP3 (and QUIC)
|
||||||
|
|
||||||
|
## Resources
|
||||||
|
|
||||||
|
[HTTP/3 Explained](https://http3-explained.haxx.se/en/) - the online free
|
||||||
|
book describing the protocols involved.
|
||||||
|
|
||||||
|
[quicwg.org](https://quicwg.org/) - home of the official protocol drafts
|
||||||
|
|
||||||
|
## QUIC libraries
|
||||||
|
|
||||||
|
QUIC libraries we are using:
|
||||||
|
|
||||||
|
[ngtcp2](https://github.com/ngtcp2/ngtcp2)
|
||||||
|
|
||||||
|
[quiche](https://github.com/cloudflare/quiche) - **EXPERIMENTAL**
|
||||||
|
|
||||||
|
[OpenSSL 3.2+ QUIC](https://github.com/openssl/openssl) - **EXPERIMENTAL**
|
||||||
|
|
||||||
|
[msh3](https://github.com/nibanks/msh3) (with [msquic](https://github.com/microsoft/msquic)) - **EXPERIMENTAL**
|
||||||
|
|
||||||
|
## Experimental
|
||||||
|
|
||||||
|
HTTP/3 support in curl is considered **EXPERIMENTAL** until further notice
|
||||||
|
when built to use *quiche* or *msh3*. Only the *ngtcp2* backend is not
|
||||||
|
experimental.
|
||||||
|
|
||||||
|
Further development and tweaking of the HTTP/3 support in curl happens in the
|
||||||
|
master branch using pull-requests, just like ordinary changes.
|
||||||
|
|
||||||
|
To fix before we remove the experimental label:
|
||||||
|
|
||||||
|
- the used QUIC library needs to consider itself non-beta
|
||||||
|
- it is fine to "leave" individual backends as experimental if necessary
|
||||||
|
|
||||||
|
# ngtcp2 version
|
||||||
|
|
||||||
|
Building curl with ngtcp2 involves 3 components: `ngtcp2` itself, `nghttp3` and a QUIC supporting TLS library. The supported TLS libraries are covered below.
|
||||||
|
|
||||||
|
* `ngtcp2`: v1.2.0
|
||||||
|
* `nghttp3`: v1.1.0
|
||||||
|
|
||||||
|
## Build with quictls
|
||||||
|
|
||||||
|
OpenSSL does not offer the required APIs for building a QUIC client. You need
|
||||||
|
to use a TLS library that has such APIs and that works with *ngtcp2*.
|
||||||
|
|
||||||
|
Build quictls
|
||||||
|
|
||||||
|
% git clone --depth 1 -b openssl-3.1.4+quic https://github.com/quictls/openssl
|
||||||
|
% cd openssl
|
||||||
|
% ./config enable-tls1_3 --prefix=<somewhere1>
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build nghttp3
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
|
||||||
|
% cd nghttp3
|
||||||
|
% git submodule update --init
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --prefix=<somewhere2> --enable-lib-only
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build ngtcp2
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.2.0 https://github.com/ngtcp2/ngtcp2
|
||||||
|
% cd ngtcp2
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3> --enable-lib-only
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build curl
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% LDFLAGS="-Wl,-rpath,<somewhere1>/lib" ./configure --with-openssl=<somewhere1> --with-nghttp3=<somewhere2> --with-ngtcp2=<somewhere3>
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
For OpenSSL 3.0.0 or later builds on Linux for x86_64 architecture, substitute all occurrences of "/lib" with "/lib64".
|
||||||
|
|
||||||
|
## Build with GnuTLS
|
||||||
|
|
||||||
|
Build GnuTLS
|
||||||
|
|
||||||
|
% git clone --depth 1 https://gitlab.com/gnutls/gnutls.git
|
||||||
|
% cd gnutls
|
||||||
|
% ./bootstrap
|
||||||
|
% ./configure --prefix=<somewhere1>
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build nghttp3
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
|
||||||
|
% cd nghttp3
|
||||||
|
% git submodule update --init
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --prefix=<somewhere2> --enable-lib-only
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build ngtcp2
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.2.0 https://github.com/ngtcp2/ngtcp2
|
||||||
|
% cd ngtcp2
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3> --enable-lib-only --with-gnutls
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build curl
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --with-gnutls=<somewhere1> --with-nghttp3=<somewhere2> --with-ngtcp2=<somewhere3>
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
## Build with wolfSSL
|
||||||
|
|
||||||
|
Build wolfSSL
|
||||||
|
|
||||||
|
% git clone https://github.com/wolfSSL/wolfssl.git
|
||||||
|
% cd wolfssl
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --prefix=<somewhere1> --enable-quic --enable-session-ticket --enable-earlydata --enable-psk --enable-harden --enable-altcertchains
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build nghttp3
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
|
||||||
|
% cd nghttp3
|
||||||
|
% git submodule update --init
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --prefix=<somewhere2> --enable-lib-only
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build ngtcp2
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.2.0 https://github.com/ngtcp2/ngtcp2
|
||||||
|
% cd ngtcp2
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3> --enable-lib-only --with-wolfssl
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build curl
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --with-wolfssl=<somewhere1> --with-nghttp3=<somewhere2> --with-ngtcp2=<somewhere3>
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
# quiche version
|
||||||
|
|
||||||
|
quiche support is **EXPERIMENTAL**
|
||||||
|
|
||||||
|
Since the quiche build manages its dependencies, curl can be built against the latest version. You are *probably* able to build against their main branch, but in case of problems, we recommend their latest release tag.
|
||||||
|
|
||||||
|
## build
|
||||||
|
|
||||||
|
Build quiche and BoringSSL:
|
||||||
|
|
||||||
|
% git clone --recursive -b 0.20.0 https://github.com/cloudflare/quiche
|
||||||
|
% cd quiche
|
||||||
|
% cargo build --package quiche --release --features ffi,pkg-config-meta,qlog
|
||||||
|
% mkdir quiche/deps/boringssl/src/lib
|
||||||
|
% ln -vnf $(find target/release -name libcrypto.a -o -name libssl.a) quiche/deps/boringssl/src/lib/
|
||||||
|
|
||||||
|
Build curl:
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure LDFLAGS="-Wl,-rpath,$PWD/../quiche/target/release" --with-openssl=$PWD/../quiche/quiche/deps/boringssl/src --with-quiche=$PWD/../quiche/target/release
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
If `make install` results in a `Permission denied` error, you need to prepend
|
||||||
|
it with `sudo`.
|
||||||
|
|
||||||
|
# OpenSSL version
|
||||||
|
|
||||||
|
QUIC support is **EXPERIMENTAL**
|
||||||
|
|
||||||
|
Build OpenSSL 3.2.0
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b openssl-3.2.0 https://github.com/openssl/openssl
|
||||||
|
% cd openssl
|
||||||
|
% ./config enable-tls1_3 --prefix=<somewhere> --libdir=<somewhere>/lib
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build nghttp3
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone -b v1.1.0 https://github.com/ngtcp2/nghttp3
|
||||||
|
% cd nghttp3
|
||||||
|
% git submodule update --init
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure --prefix=<somewhere2> --enable-lib-only
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Build curl:
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% LDFLAGS="-Wl,-rpath,<somewhere>/lib" ./configure --with-openssl=<somewhere> --with-openssl-quic --with-nghttp3=<somewhere2>
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
You can build curl with cmake:
|
||||||
|
|
||||||
|
% cd ..
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% cmake . -B build -DCURL_USE_OPENSSL=ON -DUSE_OPENSSL_QUIC=ON
|
||||||
|
% cmake --build build
|
||||||
|
% cmake --install build
|
||||||
|
|
||||||
|
If `make install` results in a `Permission denied` error, you need to prepend
|
||||||
|
it with `sudo`.
|
||||||
|
|
||||||
|
# msh3 (msquic) version
|
||||||
|
|
||||||
|
**Note**: The msquic HTTP/3 backend is immature and is not properly functional
|
||||||
|
as of September 2023. Feel free to help us test it and improve it, but
|
||||||
|
there is no point in filing bugs about it just yet.
|
||||||
|
|
||||||
|
msh3 support is **EXPERIMENTAL**
|
||||||
|
|
||||||
|
## Build Linux (with quictls fork of OpenSSL)
|
||||||
|
|
||||||
|
Build msh3:
|
||||||
|
|
||||||
|
% git clone -b v0.6.0 --depth 1 --recursive https://github.com/nibanks/msh3
|
||||||
|
% cd msh3 && mkdir build && cd build
|
||||||
|
% cmake -G 'Unix Makefiles' -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
|
||||||
|
% cmake --build .
|
||||||
|
% cmake --install .
|
||||||
|
|
||||||
|
Build curl:
|
||||||
|
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure LDFLAGS="-Wl,-rpath,/usr/local/lib" --with-msh3=/usr/local --with-openssl
|
||||||
|
% make
|
||||||
|
% make install
|
||||||
|
|
||||||
|
Run from `/usr/local/bin/curl`.
|
||||||
|
|
||||||
|
## Build Windows
|
||||||
|
|
||||||
|
Build msh3:
|
||||||
|
|
||||||
|
% git clone -b v0.6.0 --depth 1 --recursive https://github.com/nibanks/msh3
|
||||||
|
% cd msh3 && mkdir build && cd build
|
||||||
|
% cmake -G 'Visual Studio 17 2022' -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
|
||||||
|
% cmake --build . --config Release
|
||||||
|
% cmake --install . --config Release
|
||||||
|
|
||||||
|
**Note** - On Windows, Schannel is used for TLS support by default. If you
|
||||||
|
wish to use (the quictls fork of) OpenSSL, specify the `-DQUIC_TLS=openssl`
|
||||||
|
option to the generate command above. Also note that OpenSSL brings with it an
|
||||||
|
additional set of build dependencies not specified here.
|
||||||
|
|
||||||
|
Build curl (in [Visual Studio Command
|
||||||
|
prompt](../winbuild/README.md#open-a-command-prompt)):
|
||||||
|
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl/winbuild
|
||||||
|
% nmake /f Makefile.vc mode=dll WITH_MSH3=dll MSH3_PATH="C:/Program Files/msh3" MACHINE=x64
|
||||||
|
|
||||||
|
**Note** - If you encounter a build error with `tool_hugehelp.c` being
|
||||||
|
missing, rename `tool_hugehelp.c.cvs` in the same directory to
|
||||||
|
`tool_hugehelp.c` and then run `nmake` again.
|
||||||
|
|
||||||
|
Run in the `C:/Program Files/msh3/lib` directory, copy `curl.exe` to that
|
||||||
|
directory, or copy `msquic.dll` and `msh3.dll` from that directory to the
|
||||||
|
`curl.exe` directory. For example:
|
||||||
|
|
||||||
|
% C:\Program Files\msh3\lib> F:\curl\builds\libcurl-vc-x64-release-dll-ipv6-sspi-schannel-msh3\bin\curl.exe --http3 https://curl.se/
|
||||||
|
|
||||||
|
# `--http3`
|
||||||
|
|
||||||
|
Use only HTTP/3:
|
||||||
|
|
||||||
|
curl --http3-only https://example.org:4433/
|
||||||
|
|
||||||
|
Use HTTP/3 with fallback to HTTP/2 or HTTP/1.1 (see "HTTPS eyeballing" below):
|
||||||
|
|
||||||
|
curl --http3 https://example.org:4433/
|
||||||
|
|
||||||
|
Upgrade via Alt-Svc:
|
||||||
|
|
||||||
|
curl --alt-svc altsvc.cache https://curl.se/
|
||||||
|
|
||||||
|
See this [list of public HTTP/3 servers](https://bagder.github.io/HTTP3-test/)
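For libcurl applications, the corresponding knob is `CURLOPT_HTTP_VERSION`. A
minimal sketch (example URL, error checking omitted) that tries HTTP/3 with
fallback:

    #include <curl/curl.h>

    CURL *curl = curl_easy_init();
    if(curl) {
      curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/");
      /* attempt HTTP/3, falling back to earlier versions if needed;
         use CURL_HTTP_VERSION_3ONLY to forbid the fallback */
      curl_easy_setopt(curl, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_3);
      curl_easy_perform(curl);
      curl_easy_cleanup(curl);
    }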
|
||||||
|
|
||||||
|
### HTTPS eyeballing
|
||||||
|
|
||||||
|
With option `--http3` curl attempts earlier HTTP versions as well should the
|
||||||
|
connect attempt via HTTP/3 not succeed "fast enough". This strategy is similar
|
||||||
|
to IPv4/6 happy eyeballing where the alternate address family is used in
|
||||||
|
parallel after a short delay.
|
||||||
|
|
||||||
|
The IPv4/6 eyeballing has a default of 200ms and you may override that via
|
||||||
|
`--happy-eyeballs-timeout-ms value`. Since HTTP/3 is still relatively new, we
|
||||||
|
decided to use this timeout also for the HTTP eyeballing - with a slight
|
||||||
|
twist.
|
||||||
|
|
||||||
|
The `happy-eyeballs-timeout-ms` value is the **hard** timeout: once that time
has expired, an additional TLS connection is opened to negotiate HTTP/2 or
HTTP/1.1. The **soft** timeout is currently set at half of that value. The
soft timeout fires when there has been **no data at all** seen from the
server on the HTTP/3 connection.
|
||||||
|
|
||||||
|
So, without you specifying anything, the hard timeout is 200ms and the soft is 100ms:
|
||||||
|
|
||||||
|
* Ideally, the whole QUIC handshake happens and curl has an HTTP/3 connection
|
||||||
|
in less than 100ms.
|
||||||
|
* When QUIC is not supported (or UDP does not work for this network path), no
|
||||||
|
reply is seen and the HTTP/2 TLS+TCP connection starts 100ms later.
|
||||||
|
* In the worst case, UDP replies start before 100ms, but drag on. This starts
|
||||||
|
the TLS+TCP connection after 200ms.
|
||||||
|
* When the QUIC handshake fails, the TLS+TCP connection is attempted right
|
||||||
|
away. For example, when the QUIC server presents the wrong certificate.
|
||||||
|
|
||||||
|
The whole transfer only fails, when **both** QUIC and TLS+TCP fail to
|
||||||
|
handshake or time out.
|
||||||
|
|
||||||
|
Note that all this happens in addition to IP version happy eyeballing. If the
|
||||||
|
name resolution for the server gives more than one IP address, curl tries all
|
||||||
|
those until one succeeds - just as with all other protocols. If those IP
|
||||||
|
addresses contain both IPv6 and IPv4, those attempts happen, delayed, in
|
||||||
|
parallel (the actual eyeballing).
|
||||||
|
|
||||||
|
## Known Bugs
|
||||||
|
|
||||||
|
Check out the [list of known HTTP3 bugs](https://curl.se/docs/knownbugs.html#HTTP3).
|
||||||
|
|
||||||
|
# HTTP/3 Test server
|
||||||
|
|
||||||
|
This is not advice on how to run anything in production. This is for
|
||||||
|
development and experimenting.
|
||||||
|
|
||||||
|
## Prerequisite(s)
|
||||||
|
|
||||||
|
An existing local HTTP/1.1 server that hosts files. Preferably also a few huge
|
||||||
|
ones. You can easily create huge local files like `truncate -s=8G 8GB` - they
|
||||||
|
are huge but do not occupy that much space on disk since they are just big
|
||||||
|
holes.
|
||||||
|
|
||||||
|
In a Debian setup you can install **apache2**. It runs on port 80 and has a
|
||||||
|
document root in `/var/www/html`. Download the 8GB file from apache with `curl
|
||||||
|
localhost/8GB -o /dev/null`
|
||||||
|
|
||||||
|
In this description we set up and run an HTTP/3 reverse-proxy in front of the
|
||||||
|
HTTP/1 server.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
You can select either or both of these server solutions.
|
||||||
|
|
||||||
|
### nghttpx
|
||||||
|
|
||||||
|
Get, build and install **quictls**, **nghttp3** and **ngtcp2** as described
|
||||||
|
above.
|
||||||
|
|
||||||
|
Get, build and install **nghttp2**:
|
||||||
|
|
||||||
|
git clone https://github.com/nghttp2/nghttp2.git
|
||||||
|
cd nghttp2
|
||||||
|
autoreconf -fi
|
||||||
|
PKG_CONFIG_PATH=$PKG_CONFIG_PATH:/home/daniel/build-quictls/lib/pkgconfig:/home/daniel/build-nghttp3/lib/pkgconfig:/home/daniel/build-ngtcp2/lib/pkgconfig LDFLAGS=-L/home/daniel/build-quictls/lib CFLAGS=-I/home/daniel/build-quictls/include ./configure --enable-maintainer-mode --prefix=/home/daniel/build-nghttp2 --disable-shared --enable-app --enable-http3 --without-jemalloc --without-libxml2 --without-systemd
|
||||||
|
make && make install
|
||||||
|
|
||||||
|
Run the local h3 server on port 9443, make it proxy all traffic through to
|
||||||
|
HTTP/1 on localhost port 80. For local toying, we can just use the test cert
|
||||||
|
that exists in curl's test dir.
|
||||||
|
|
||||||
|
CERT=$CURLSRC/tests/stunnel.pem
|
||||||
|
$HOME/bin/nghttpx $CERT $CERT --backend=localhost,80 \
|
||||||
|
--frontend="localhost,9443;quic"
|
||||||
|
|
||||||
|
### Caddy
|
||||||
|
|
||||||
|
[Install Caddy](https://caddyserver.com/docs/install). For easiest use, the binary
|
||||||
|
should be either in your PATH or your current directory.
|
||||||
|
|
||||||
|
Create a `Caddyfile` with the following content:
|
||||||
|
~~~
|
||||||
|
localhost:7443 {
|
||||||
|
respond "Hello, world! you are using {http.request.proto}"
|
||||||
|
}
|
||||||
|
~~~
|
||||||
|
|
||||||
|
Then run Caddy:
|
||||||
|
|
||||||
|
./caddy start
|
||||||
|
|
||||||
|
Making requests to `https://localhost:7443` should tell you which protocol is being used.
|
||||||
|
|
||||||
|
You can change the hard-coded response to something more useful by replacing `respond`
|
||||||
|
with `reverse_proxy` or `file_server`, for example: `reverse_proxy localhost:80`
|
78
src/dependencies/curl-8.8.0/docs/HYPER.md
Normal file
@@ -0,0 +1,78 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Hyper
|
||||||
|
|
||||||
|
Hyper is a separate HTTP library written in Rust. curl can be told to use this
|
||||||
|
library as a backend to deal with HTTP.
|
||||||
|
|
||||||
|
## Experimental!
|
||||||
|
|
||||||
|
Hyper support in curl is considered **EXPERIMENTAL** until further notice. It
|
||||||
|
needs to be explicitly enabled at build-time.
|
||||||
|
|
||||||
|
Further development and tweaking of the Hyper backend support in curl happens
|
||||||
|
in the master branch using pull-requests, just like ordinary changes.
|
||||||
|
|
||||||
|
## Hyper version
|
||||||
|
|
||||||
|
The C API for Hyper is brand new and is still under development.
|
||||||
|
|
||||||
|
## build curl with hyper
|
||||||
|
|
||||||
|
Using Rust 1.64.0 or later, build hyper and enable its C API like this:
|
||||||
|
|
||||||
|
% git clone https://github.com/hyperium/hyper
|
||||||
|
% cd hyper
|
||||||
|
% RUSTFLAGS="--cfg hyper_unstable_ffi" cargo rustc --features client,http1,http2,ffi --crate-type cdylib
|
||||||
|
|
||||||
|
Also, `--release` can be added for a release (optimized) build.
|
||||||
|
|
||||||
|
Build curl to use hyper's C API:
|
||||||
|
|
||||||
|
% git clone https://github.com/curl/curl
|
||||||
|
% cd curl
|
||||||
|
% autoreconf -fi
|
||||||
|
% ./configure LDFLAGS="-Wl,-rpath,<hyper-dir>/target/debug -Wl,-rpath,<hyper-dir>/target/release" --with-openssl --with-hyper=<hyper-dir>
|
||||||
|
% make
|
||||||
|
|
||||||
|
# using Hyper internally
|
||||||
|
|
||||||
|
Hyper is a low level HTTP transport library. curl itself provides all HTTP
|
||||||
|
headers and Hyper provides all received headers back to curl.
|
||||||
|
|
||||||
|
Therefore, most of the "header logic" in curl, such as responding to and acting
on specific input and output headers, is done the same way in the curl code.
|
||||||
|
|
||||||
|
The API in Hyper delivers received HTTP headers as (cleaned up) name=value
|
||||||
|
pairs, making it impossible for curl to know the exact byte representation
|
||||||
|
over the wire with Hyper.
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
The hyper backend does not support
|
||||||
|
|
||||||
|
- `CURLOPT_IGNORE_CONTENT_LENGTH`
|
||||||
|
- `--raw` and disabling `CURLOPT_HTTP_TRANSFER_DECODING`
|
||||||
|
- RTSP
|
||||||
|
- hyper is much stricter about what HTTP header contents it allows
|
||||||
|
- leading whitespace in first HTTP/1 response header
|
||||||
|
- HTTP/0.9
|
||||||
|
- HTTP/2 upgrade using HTTP:// URLs. Aka 'h2c'
|
||||||
|
- HTTP/2 in general. Hyper has support for HTTP/2 but the curl side
|
||||||
|
needs changes so that a `hyper_clientconn` can last for the duration
|
||||||
|
of a connection. Probably this means turning the Hyper HTTP/2 backend
|
||||||
|
into a connection filter.
|
||||||
|
|
||||||
|
## Remaining issues
|
||||||
|
|
||||||
|
This backend is still not feature complete with the native backend. Areas that
|
||||||
|
still need attention and verification include:
|
||||||
|
|
||||||
|
- multiplexed HTTP/2
|
||||||
|
- h2 Upgrade:
|
||||||
|
- receiving HTTP/1 trailers
|
||||||
|
- sending HTTP/1 trailers
|
9
src/dependencies/curl-8.8.0/docs/INSTALL
Normal file
@@ -0,0 +1,9 @@
|
||||||
|
_ _ ____ _
|
||||||
|
___| | | | _ \| |
|
||||||
|
/ __| | | | |_) | |
|
||||||
|
| (__| |_| | _ <| |___
|
||||||
|
\___|\___/|_| \_\_____|
|
||||||
|
|
||||||
|
How To Compile
|
||||||
|
|
||||||
|
see INSTALL.md
|
138
src/dependencies/curl-8.8.0/docs/INSTALL-CMAKE.md
Normal file
@@ -0,0 +1,138 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Building with CMake
|
||||||
|
|
||||||
|
This document describes how to configure, build and install curl and libcurl
|
||||||
|
from source code using the CMake build tool. To build with CMake, you of
|
||||||
|
course first have to install CMake. The minimum required version of CMake is
|
||||||
|
specified in the file `CMakeLists.txt` found in the top of the curl source
|
||||||
|
tree. Once the correct version of CMake is installed you can follow the
|
||||||
|
instructions below for the platform you are building on.
|
||||||
|
|
||||||
|
CMake builds can be configured either from the command line, or from one of
|
||||||
|
CMake's GUIs.
|
||||||
|
|
||||||
|
# Current flaws in the curl CMake build
|
||||||
|
|
||||||
|
Missing features in the CMake build:
|
||||||
|
|
||||||
|
- Builds libcurl without large file support
|
||||||
|
- Does not support all SSL libraries (only OpenSSL, Schannel, Secure
|
||||||
|
Transport, mbedTLS, and wolfSSL)
|
||||||
|
- Does not allow different resolver backends (no c-ares build support)
|
||||||
|
- No RTMP support built
|
||||||
|
- Does not allow building curl and libcurl with debug enabled
|
||||||
|
- Does not allow a custom CA bundle path
|
||||||
|
- Does not allow you to disable specific protocols from the build
|
||||||
|
- Does not find or use krb4 or GSS
|
||||||
|
- Rebuilds test files too eagerly, but still cannot run the tests
|
||||||
|
- Does not detect the correct `strerror_r` flavor when cross-compiling
|
||||||
|
(issue #1123)
|
||||||
|
|
||||||
|
# Configuring
|
||||||
|
|
||||||
|
A CMake configuration of curl is similar to the autotools build of curl.
|
||||||
|
It consists of the following steps after you have unpacked the source.
|
||||||
|
|
||||||
|
## Using `cmake`
|
||||||
|
|
||||||
|
You can configure for in source tree builds or for a build tree
|
||||||
|
that is apart from the source tree.
|
||||||
|
|
||||||
|
- Build in the source tree.
|
||||||
|
|
||||||
|
$ cmake -B .
|
||||||
|
|
||||||
|
- Build in a separate directory (parallel to the curl source tree in this
|
||||||
|
example). The build directory is created for you.
|
||||||
|
|
||||||
|
$ cmake -B ../curl-build
|
||||||
|
|
||||||
|
### Fallback for CMake before version 3.13
|
||||||
|
|
||||||
|
CMake before version 3.13 does not support the `-B` option. In that case,
|
||||||
|
you must create the build directory yourself, `cd` to it and run `cmake`
|
||||||
|
from there:
|
||||||
|
|
||||||
|
$ mkdir ../curl-build
|
||||||
|
$ cd ../curl-build
|
||||||
|
$ cmake ../curl
|
||||||
|
|
||||||
|
If you want to build in the source tree, it is enough to do this:
|
||||||
|
|
||||||
|
$ cmake .
|
||||||
|
|
||||||
|
### Build system generator selection
|
||||||
|
|
||||||
|
You can override CMake's default by using `-G <generator-name>`. For example
|
||||||
|
on Windows with multiple build systems, if you have MinGW-w64 you could use
|
||||||
|
`-G "MinGW Makefiles"`.
|
||||||
|
[List of generator names](https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html).
|
||||||
|
|
||||||
|
## Using `ccmake`
|
||||||
|
|
||||||
|
CMake comes with a curses based interface called `ccmake`. To run `ccmake`
|
||||||
|
on curl, use the instructions for the command line cmake, but substitute
|
||||||
|
`ccmake` for `cmake`.
|
||||||
|
|
||||||
|
This brings up a curses interface with instructions on the bottom of the
|
||||||
|
screen. You can press the "c" key to configure the project, and the "g" key to
|
||||||
|
generate the project. After the project is generated, you can run make.
|
||||||
|
|
||||||
|
## Using `cmake-gui`
|
||||||
|
|
||||||
|
CMake also comes with a Qt based GUI called `cmake-gui`. To configure with
|
||||||
|
`cmake-gui`, you run `cmake-gui` and follow these steps:
|
||||||
|
|
||||||
|
1. Fill in the "Where is the source code" combo box with the path to
|
||||||
|
the curl source tree.
|
||||||
|
2. Fill in the "Where to build the binaries" combo box with the path to
|
||||||
|
the directory for your build tree, ideally this should not be the same
|
||||||
|
as the source tree, but a parallel directory called curl-build or
|
||||||
|
something similar.
|
||||||
|
3. Once the source and binary directories are specified, press the
|
||||||
|
"Configure" button.
|
||||||
|
4. Select the native build tool that you want to use.
|
||||||
|
5. At this point you can change any of the options presented in the GUI.
|
||||||
|
Once you have selected all the options you want, click the "Generate"
|
||||||
|
button.
|
||||||
|
|
||||||
|
# Building
|
||||||
|
|
||||||
|
Build (you have to specify the build directory).
|
||||||
|
|
||||||
|
$ cmake --build ../curl-build
|
||||||
|
|
||||||
|
### Fallback for CMake before version 3.13
|
||||||
|
|
||||||
|
CMake before version 3.13 does not support the `--build` option. In that
|
||||||
|
case, you have to `cd` to the build directory and use the building tool that
|
||||||
|
corresponds to the build files that CMake generated for you. This example
|
||||||
|
assumes that CMake generates `Makefile`:
|
||||||
|
|
||||||
|
$ cd ../curl-build
|
||||||
|
$ make
|
||||||
|
|
||||||
|
# Testing
|
||||||
|
|
||||||
|
(The test suite does not yet work with the cmake build)
|
||||||
|
|
||||||
|
# Installing
|
||||||
|
|
||||||
|
Install to default location (you have to specify the build directory).
|
||||||
|
|
||||||
|
$ cmake --install ../curl-build
|
||||||
|
|
||||||
|
### Fallback for CMake before version 3.15
|
||||||
|
|
||||||
|
CMake before version 3.15 does not support the `--install` option. In that
|
||||||
|
case, you have to `cd` to the build directory and use the building tool that
|
||||||
|
corresponds to the build files that CMake generated for you. This example
|
||||||
|
assumes that CMake generates `Makefile`:
|
||||||
|
|
||||||
|
$ cd ../curl-build
|
||||||
|
$ make install
|
584
src/dependencies/curl-8.8.0/docs/INSTALL.md
Normal file
@@ -0,0 +1,584 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# how to install curl and libcurl
|
||||||
|
|
||||||
|
## Installing Binary Packages
|
||||||
|
|
||||||
|
Lots of people download binary distributions of curl and libcurl. This
|
||||||
|
document does not describe how to install curl or libcurl using such a binary
|
||||||
|
package. This document describes how to compile, build and install curl and
|
||||||
|
libcurl from source code.
|
||||||
|
|
||||||
|
## Building using vcpkg
|
||||||
|
|
||||||
|
You can download and install curl and libcurl using the [vcpkg](https://github.com/Microsoft/vcpkg/) dependency manager:
|
||||||
|
|
||||||
|
git clone https://github.com/Microsoft/vcpkg.git
|
||||||
|
cd vcpkg
|
||||||
|
./bootstrap-vcpkg.sh
|
||||||
|
./vcpkg integrate install
|
||||||
|
vcpkg install curl[tool]
|
||||||
|
|
||||||
|
The curl port in vcpkg is kept up to date by Microsoft team members and
|
||||||
|
community contributors. If the version is out of date, please [create an issue
|
||||||
|
or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository.
|
||||||
|
|
||||||
|
## Building from git
|
||||||
|
|
||||||
|
If you get your code off a git repository instead of a release tarball, see
|
||||||
|
the `GIT-INFO.md` file in the root directory for specific instructions on how
|
||||||
|
to proceed.
|
||||||
|
|
||||||
|
# Unix
|
||||||
|
|
||||||
|
A normal Unix installation is made in three or four steps (after you have
|
||||||
|
unpacked the source archive):
|
||||||
|
|
||||||
|
./configure --with-openssl [--with-gnutls --with-wolfssl]
|
||||||
|
make
|
||||||
|
make test (optional)
|
||||||
|
make install
|
||||||
|
|
||||||
|
(Adjust the configure line accordingly to use the TLS library you want.)
|
||||||
|
|
||||||
|
You probably need to be root when doing the last command.
|
||||||
|
|
||||||
|
Get a full listing of all available configure options by invoking it like:
|
||||||
|
|
||||||
|
./configure --help
|
||||||
|
|
||||||
|
If you want to install curl in a different file hierarchy than `/usr/local`,
|
||||||
|
specify that when running configure:
|
||||||
|
|
||||||
|
./configure --prefix=/path/to/curl/tree
|
||||||
|
|
||||||
|
If you have write permission in that directory, you can do 'make install'
|
||||||
|
without being root. An example of this would be to make a local install in
|
||||||
|
your own home directory:
|
||||||
|
|
||||||
|
./configure --prefix=$HOME
|
||||||
|
make
|
||||||
|
make install
|
||||||
|
|
||||||
|
The configure script always tries to find a working SSL library unless
|
||||||
|
explicitly told not to. If you have OpenSSL installed in the default search
|
||||||
|
path for your compiler/linker, you do not need to do anything special. If you
|
||||||
|
have OpenSSL installed in `/usr/local/ssl`, you can run configure like:
|
||||||
|
|
||||||
|
./configure --with-openssl
|
||||||
|
|
||||||
|
If you have OpenSSL installed somewhere else (for example, `/opt/OpenSSL`) and
|
||||||
|
you have pkg-config installed, set the pkg-config path first, like this:
|
||||||
|
|
||||||
|
env PKG_CONFIG_PATH=/opt/OpenSSL/lib/pkgconfig ./configure --with-openssl
|
||||||
|
|
||||||
|
Without pkg-config installed, use this:
|
||||||
|
|
||||||
|
./configure --with-openssl=/opt/OpenSSL
|
||||||
|
|
||||||
|
If you insist on forcing a build without SSL support, you can run configure
|
||||||
|
like this:
|
||||||
|
|
||||||
|
./configure --without-ssl
|
||||||
|
|
||||||
|
If you have OpenSSL installed, but with the libraries in one place and the
|
||||||
|
header files somewhere else, you have to set the `LDFLAGS` and `CPPFLAGS`
|
||||||
|
environment variables prior to running configure. Something like this should
|
||||||
|
work:
|
||||||
|
|
||||||
|
CPPFLAGS="-I/path/to/ssl/include" LDFLAGS="-L/path/to/ssl/lib" ./configure
|
||||||
|
|
||||||
|
If you have shared SSL libs installed in a directory where your runtime
|
||||||
|
linker does not find them (which usually causes configure failures), you can
|
||||||
|
provide this option to gcc to set a hard-coded path to the runtime linker:
|
||||||
|
|
||||||
|
LDFLAGS=-Wl,-R/usr/local/ssl/lib ./configure --with-openssl
|
||||||
|
|
||||||
|
## Static builds
|
||||||
|
|
||||||
|
To force a static library compile, disable the shared library creation by
|
||||||
|
running configure like:
|
||||||
|
|
||||||
|
./configure --disable-shared
|
||||||
|
|
||||||
|
The configure script is primarily designed to work with shared/dynamic third party
|
||||||
|
dependencies. When linking with shared libraries, the dependency "chain" is
|
||||||
|
handled automatically by the library loader - on all modern systems.
|
||||||
|
|
||||||
|
If you instead link with a static library, you need to provide all the
|
||||||
|
dependency libraries already at the link command line.
|
||||||
|
|
||||||
|
Figuring out all the dependency libraries for a given library is hard, as it
|
||||||
|
might involve figuring out the dependencies of the dependencies and they vary
|
||||||
|
between platforms and change between versions.
|
||||||
|
|
||||||
|
When using static dependencies, the build scripts mostly assume that you, the
|
||||||
|
user, provide all the necessary additional dependency libraries as additional
|
||||||
|
arguments in the build. With configure, by setting `LIBS` or `LDFLAGS` on the
|
||||||
|
command line.
|
||||||
|
|
||||||
|
Building statically is not for the faint of heart.
|
||||||
|
|
||||||
|
## Debug
|
||||||
|
|
||||||
|
If you are a curl developer and use gcc, you might want to enable more debug
|
||||||
|
options with the `--enable-debug` option.
|
||||||
|
|
||||||
|
curl can be built to use a whole range of libraries to provide various useful
|
||||||
|
services, and configure tries to auto-detect a decent default. If you want to
|
||||||
|
alter it, you can select how to deal with each individual library.
|
||||||
|
|
||||||
|
## Select TLS backend
|
||||||
|
|
||||||
|
These options are provided to select the TLS backend to use.
|
||||||
|
|
||||||
|
- AmiSSL: `--with-amissl`
|
||||||
|
- BearSSL: `--with-bearssl`
|
||||||
|
- GnuTLS: `--with-gnutls`.
|
||||||
|
- mbedTLS: `--with-mbedtls`
|
||||||
|
- OpenSSL: `--with-openssl` (also for BoringSSL, AWS-LC, libressl, and quictls)
|
||||||
|
- rustls: `--with-rustls`
|
||||||
|
- Schannel: `--with-schannel`
|
||||||
|
- Secure Transport: `--with-secure-transport`
|
||||||
|
- wolfSSL: `--with-wolfssl`
|
||||||
|
|
||||||
|
You can build curl with *multiple* TLS backends at your choice, but some TLS
|
||||||
|
backends cannot be combined: if you build with an OpenSSL fork (or wolfSSL),
|
||||||
|
you cannot add another OpenSSL fork (or wolfSSL) simply because they have
|
||||||
|
conflicting identical symbol names.
|
||||||
|
|
||||||
|
When you build with multiple TLS backends, you can select the active one at
|
||||||
|
runtime when curl starts up.
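For libcurl applications, one way to make that selection is
`curl_global_sslset()` before any other libcurl call. A minimal sketch (only
meaningful in builds that carry more than one TLS backend):

```c
#include <curl/curl.h>

int main(void)
{
  /* pick the OpenSSL backend; fails if it is not compiled in */
  if(curl_global_sslset(CURLSSLBACKEND_OPENSSL, NULL, NULL) != CURLSSLSET_OK)
    return 1;
  curl_global_init(CURL_GLOBAL_DEFAULT);
  /* ... transfers ... */
  curl_global_cleanup();
  return 0;
}
```

The curl tool similarly honors the `CURL_SSL_BACKEND` environment variable.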
|
||||||
|
|
||||||
|
## configure finding libs in wrong directory
|
||||||
|
|
||||||
|
When the configure script checks for third-party libraries, it adds those
|
||||||
|
directories to the `LDFLAGS` variable and then tries linking to see if it
|
||||||
|
works. When successful, the found directory is kept in the `LDFLAGS` variable
|
||||||
|
when the script continues to execute and do more tests and possibly check for
|
||||||
|
more libraries.
|
||||||
|
|
||||||
|
This can make subsequent checks for libraries wrongly detect another
|
||||||
|
installation in a directory that was previously added to `LDFLAGS` by another
|
||||||
|
library check.
|
||||||
|
|
||||||
|
# Windows
|
||||||
|
|
||||||
|
Building for Windows XP is required as a minimum.
|
||||||
|
|
||||||
|
## Building Windows DLLs and C runtime (CRT) linkage issues
|
||||||
|
|
||||||
|
As a general rule, building a DLL with static CRT linkage is highly
|
||||||
|
discouraged, and intermixing CRTs in the same app is something to avoid at
|
||||||
|
any cost.
|
||||||
|
|
||||||
|
Reading and comprehending Microsoft Knowledge Base articles KB94248 and
|
||||||
|
KB140584 is a must for any Windows developer. Especially important is full
|
||||||
|
understanding if you are not going to follow the advice given above.
|
||||||
|
|
||||||
|
- [How To Use the C Runtime](https://support.microsoft.com/help/94248/how-to-use-the-c-run-time)
|
||||||
|
- [Runtime Library Compiler Options](https://docs.microsoft.com/cpp/build/reference/md-mt-ld-use-run-time-library)
|
||||||
|
- [Potential Errors Passing CRT Objects Across DLL Boundaries](https://docs.microsoft.com/cpp/c-runtime-library/potential-errors-passing-crt-objects-across-dll-boundaries)
|
||||||
|
|
||||||
|
If your app is misbehaving in some strange way, or it is suffering from memory
|
||||||
|
corruption, before asking for further help, please try first to rebuild every
|
||||||
|
single library your app uses as well as your app using the debug
|
||||||
|
multi-threaded dynamic C runtime.
|
||||||
|
|
||||||
|
If you get linkage errors read section 5.7 of the FAQ document.
|
||||||
|
|
||||||
|
## Cygwin
|
||||||
|
|
||||||
|
Almost identical to the Unix installation. Run the configure script in the
|
||||||
|
curl source tree root with `sh configure`. Make sure you have the `sh`
|
||||||
|
executable in `/bin/` or you see the configure fail toward the end.
|
||||||
|
|
||||||
|
Run `make`
|
||||||
|
|
||||||
|
## MS-DOS
|
||||||
|
|
||||||
|
Requires DJGPP in the search path and pointing to the Watt-32 stack via
|
||||||
|
`WATT_PATH=c:/djgpp/net/watt`.
|
||||||
|
|
||||||
|
Run `make -f Makefile.dist djgpp` in the root curl dir.
|
||||||
|
|
||||||
|
For build configuration options, please see the mingw-w64 section.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- DJGPP 2.04 beta has a `sscanf()` bug so the URL parsing is not done
|
||||||
|
properly. Use DJGPP 2.03 until they fix it.
|
||||||
|
|
||||||
|
- Compile Watt-32 (and OpenSSL) with the same version of DJGPP. Otherwise
|
||||||
|
things go wrong because things like FS-extensions and `errno` values have
|
||||||
|
been changed between releases.
|
||||||
|
|
||||||
|
## AmigaOS
|
||||||
|
|
||||||
|
Run `make -f Makefile.dist amiga` in the root curl dir.
|
||||||
|
|
||||||
|
For build configuration options, please see the mingw-w64 section.
|
||||||
|
|
||||||
|
## Disabling Specific Protocols in Windows builds
|
||||||
|
|
||||||
|
The configure utility, unfortunately, is not available for the Windows
|
||||||
|
environment, therefore, you cannot use the various disable-protocol options of
|
||||||
|
the configure utility on this platform.
|
||||||
|
|
||||||
|
You can use specific defines to disable specific protocols and features. See
|
||||||
|
[CURL-DISABLE](CURL-DISABLE.md) for the full list.
|
||||||
|
|
||||||
|
If you want to set any of these defines you have the following options:
|
||||||
|
|
||||||
|
- Modify `lib/config-win32.h`
|
||||||
|
- Modify `lib/curl_setup.h`
|
||||||
|
- Modify `winbuild/Makefile.vc`
|
||||||
|
- Modify the "Preprocessor Definitions" in the libcurl project
|
||||||
|
|
||||||
|
Note: The pre-processor settings can be found using the Visual Studio IDE
|
||||||
|
under "Project -> Properties -> Configuration Properties -> C/C++ ->
|
||||||
|
Preprocessor".
|
||||||
|
|
||||||
|
## Using BSD-style lwIP instead of Winsock TCP/IP stack in Win32 builds
|
||||||
|
|
||||||
|
In order to compile libcurl and curl using BSD-style lwIP TCP/IP stack it is
|
||||||
|
necessary to make the definition of the preprocessor symbol `USE_LWIPSOCK`
|
||||||
|
visible to libcurl and curl compilation processes. To set this definition you
|
||||||
|
have the following alternatives:
|
||||||
|
|
||||||
|
- Modify `lib/config-win32.h` and `src/config-win32.h`
|
||||||
|
- Modify `winbuild/Makefile.vc`
|
||||||
|
- Modify the "Preprocessor Definitions" in the libcurl project
|
||||||
|
|
||||||
|
Note: The pre-processor settings can be found using the Visual Studio IDE
|
||||||
|
under "Project -> Properties -> Configuration Properties -> C/C++ ->
|
||||||
|
Preprocessor".
|
||||||
|
|
||||||
|
Once libcurl has been built with BSD-style lwIP TCP/IP stack support, in
|
||||||
|
order to use it with your program it is mandatory that your program includes
|
||||||
|
lwIP header file `<lwip/opt.h>` (or another lwIP header that includes this)
|
||||||
|
before including any libcurl header. Your program does not need the
|
||||||
|
`USE_LWIPSOCK` preprocessor definition which is for libcurl internals only.
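In code, the required include order is simply (sketch):

```c
#include <lwip/opt.h>   /* an lwIP header must come before any libcurl header */
#include <curl/curl.h>
```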
|
||||||
|
|
||||||
|
Compilation has been verified with lwIP 1.4.0.
|
||||||
|
|
||||||
|
This BSD-style lwIP TCP/IP stack support must be considered experimental given
|
||||||
|
that it has been verified that lwIP 1.4.0 still needs some polish, and libcurl
|
||||||
|
might yet need some additional adjustment.
|
||||||
|
|
||||||
|
## Important static libcurl usage note
|
||||||
|
|
||||||
|
When building an application that uses the static libcurl library on Windows,
|
||||||
|
you must add `-DCURL_STATICLIB` to your `CFLAGS`. Otherwise the linker looks
|
||||||
|
for dynamic import symbols.
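Equivalently, the macro can be defined in the source before the libcurl
header is included (sketch):

```c
#define CURL_STATICLIB          /* no DLL import decoration on libcurl symbols */
#include <curl/curl.h>
```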
|
||||||
|
|
||||||
|
## Legacy Windows and SSL
|
||||||
|
|
||||||
|
Schannel (from Windows SSPI) is the native SSL library in Windows. However,
|
||||||
|
Schannel in Windows <= XP is unable to connect to servers that no longer
|
||||||
|
support the legacy handshakes and algorithms used by those versions. If you
|
||||||
|
are using curl in one of those earlier versions of Windows you should choose
|
||||||
|
another SSL backend such as OpenSSL.
|
||||||
|
|
||||||
|
# Apple Platforms (macOS, iOS, tvOS, watchOS, and their simulator counterparts)
|
||||||
|
|
||||||
|
On modern Apple operating systems, curl can be built to use Apple's SSL/TLS
|
||||||
|
implementation, Secure Transport, instead of OpenSSL. To build with Secure
|
||||||
|
Transport for SSL/TLS, use the configure option `--with-secure-transport`.
|
||||||
|
|
||||||
|
When Secure Transport is in use, the curl options `--cacert` and `--capath`
|
||||||
|
and their libcurl equivalents, are ignored, because Secure Transport uses the
|
||||||
|
certificates stored in the Keychain to evaluate whether or not to trust the
|
||||||
|
server. This, of course, includes the root certificates that ship with the OS.
|
||||||
|
The `--cert` and `--engine` options, and their libcurl equivalents, are
|
||||||
|
currently unimplemented in curl with Secure Transport.
|
||||||
|
|
||||||
|
In general, a curl build for an Apple `ARCH/SDK/DEPLOYMENT_TARGET` combination
|
||||||
|
can be made by providing appropriate values for `ARCH`, `SDK`, `DEPLOYMENT_TARGET`
|
||||||
|
below and running the commands:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Set these three according to your needs
|
||||||
|
export ARCH=x86_64
|
||||||
|
export SDK=macosx
|
||||||
|
export DEPLOYMENT_TARGET=10.8
|
||||||
|
|
||||||
|
export CFLAGS="-arch $ARCH -isysroot $(xcrun -sdk $SDK --show-sdk-path) -m$SDK-version-min=$DEPLOYMENT_TARGET"
|
||||||
|
./configure --host=$ARCH-apple-darwin --prefix $(pwd)/artifacts --with-secure-transport
|
||||||
|
make -j8
|
||||||
|
make install
|
||||||
|
```
|
||||||
|
|
||||||
|
The above command lines build curl for macOS platform with `x86_64`
|
||||||
|
architecture and `10.8` as deployment target.
|
||||||
|
|
||||||
|
Here is an example for iOS device:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export ARCH=arm64
|
||||||
|
export SDK=iphoneos
|
||||||
|
export DEPLOYMENT_TARGET=11.0
|
||||||
|
|
||||||
|
export CFLAGS="-arch $ARCH -isysroot $(xcrun -sdk $SDK --show-sdk-path) -m$SDK-version-min=$DEPLOYMENT_TARGET"
|
||||||
|
./configure --host=$ARCH-apple-darwin --prefix $(pwd)/artifacts --with-secure-transport
|
||||||
|
make -j8
|
||||||
|
make install
|
||||||
|
```
|
||||||
|
|
||||||
|
Another example for watchOS simulator for macs with Apple Silicon:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export ARCH=arm64
|
||||||
|
export SDK=watchsimulator
|
||||||
|
export DEPLOYMENT_TARGET=5.0
|
||||||
|
|
||||||
|
export CFLAGS="-arch $ARCH -isysroot $(xcrun -sdk $SDK --show-sdk-path) -m$SDK-version-min=$DEPLOYMENT_TARGET"
|
||||||
|
./configure --host=$ARCH-apple-darwin --prefix $(pwd)/artifacts --with-secure-transport
|
||||||
|
make -j8
|
||||||
|
make install
|
||||||
|
```
|
||||||
|
|
||||||
|
In all above, the built libraries and executables can be found in the
|
||||||
|
`artifacts` folder.
|
||||||
|
|
||||||
|
# Android
|
||||||
|
|
||||||
|
When building curl for Android it is recommended to use a Linux/macOS
|
||||||
|
environment since using curl's `configure` script is the easiest way to build
|
||||||
|
curl for Android. Before you can build curl for Android, you need to install
|
||||||
|
the Android NDK first. This can be done using the SDK Manager that is part of
|
||||||
|
Android Studio. Once you have installed the Android NDK, you need to figure
|
||||||
|
out where it has been installed and then set up some environment variables
|
||||||
|
before launching `configure`. On macOS, those variables could look like this
|
||||||
|
to compile for `aarch64` and API level 29:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export ANDROID_NDK_HOME=~/Library/Android/sdk/ndk/25.1.8937393 # Point into your NDK.
|
||||||
|
export HOST_TAG=darwin-x86_64 # Same tag for Apple Silicon. Other OS values here: https://developer.android.com/ndk/guides/other_build_systems#overview
|
||||||
|
export TOOLCHAIN=$ANDROID_NDK_HOME/toolchains/llvm/prebuilt/$HOST_TAG
|
||||||
|
export AR=$TOOLCHAIN/bin/llvm-ar
|
||||||
|
export AS=$TOOLCHAIN/bin/llvm-as
|
||||||
|
export CC=$TOOLCHAIN/bin/aarch64-linux-android29-clang
export CXX=$TOOLCHAIN/bin/aarch64-linux-android29-clang++
|
||||||
|
export LD=$TOOLCHAIN/bin/ld
|
||||||
|
export RANLIB=$TOOLCHAIN/bin/llvm-ranlib
|
||||||
|
export STRIP=$TOOLCHAIN/bin/llvm-strip
|
||||||
|
```
|
||||||
|
|
||||||
|
When building on Linux or targeting other API levels or architectures, you need
|
||||||
|
to adjust those variables accordingly. After that you can build curl like this:
|
||||||
|
|
||||||
|
./configure --host aarch64-linux-android --with-pic --disable-shared
|
||||||
|
|
||||||
|
Note that this does not give you SSL/TLS support. If you need SSL/TLS, you
|
||||||
|
have to build curl with a SSL/TLS library, e.g. OpenSSL, because it is
|
||||||
|
impossible for curl to access Android's native SSL/TLS layer. To build curl
|
||||||
|
for Android using OpenSSL, follow the OpenSSL build instructions and then
|
||||||
|
install `libssl.a` and `libcrypto.a` to `$TOOLCHAIN/sysroot/usr/lib` and copy
|
||||||
|
`include/openssl` to `$TOOLCHAIN/sysroot/usr/include`. Now you can build curl
|
||||||
|
for Android using OpenSSL like this:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
LIBS="-lssl -lcrypto -lc++" # For OpenSSL/BoringSSL. In general, you need to the SSL/TLS layer's transitive dependencies if you are linking statically.
|
||||||
|
./configure --host aarch64-linux-android --with-pic --disable-shared --with-openssl="$TOOLCHAIN/sysroot/usr"
|
||||||
|
```
|
||||||
|
|
||||||
|
# IBM i
|
||||||
|
|
||||||
|
For IBM i (formerly OS/400), you can use curl in two different ways:
|
||||||
|
|
||||||
|
- Natively, running in the **ILE**. The obvious use is being able to call curl
|
||||||
|
from ILE C or RPG applications.
|
||||||
|
- You need to build this from source. See `packages/OS400/README` for the ILE
|
||||||
|
specific build instructions.
|
||||||
|
- In the **PASE** environment, which runs AIX programs. curl is built as it
|
||||||
|
would be on AIX.
|
||||||
|
- IBM provides builds of curl in their Yum repository for PASE software.
|
||||||
|
- To build from source, follow the Unix instructions.
|
||||||
|
|
||||||
|
There are some additional limitations and quirks with curl on this platform;
|
||||||
|
they affect both environments.
|
||||||
|
|
||||||
|
## Multi-threading notes
|
||||||
|
|
||||||
|
By default, jobs in IBM i do not start with threading enabled. (Exceptions
|
||||||
|
include interactive PASE sessions started by `QP2TERM` or SSH.) If you use
|
||||||
|
curl in an environment without threading when options like asynchronous DNS
|
||||||
|
were enabled, you get messages like:
|
||||||
|
|
||||||
|
```
|
||||||
|
getaddrinfo() thread failed to start
|
||||||
|
```
|
||||||
|
|
||||||
|
Do not panic. curl and your program are not broken. You can fix this by:
|
||||||
|
|
||||||
|
- Set the environment variable `QIBM_MULTI_THREADED` to `Y` before starting
|
||||||
|
your program. This can be done at whatever scope you feel is appropriate.
|
||||||
|
- Alternatively, start the job with the `ALWMLTTHD` parameter set to `*YES`.
|
||||||
|
|
||||||
|
# Cross compile
|
||||||
|
|
||||||
|
Download and unpack the curl package.
|
||||||
|
|
||||||
|
`cd` to the new directory. (e.g. `cd curl-7.12.3`)
|
||||||
|
|
||||||
|
Set environment variables to point to the cross-compile toolchain and call
|
||||||
|
configure with any options you need. Be sure and specify the `--host` and
|
||||||
|
`--build` parameters at configuration time. The following script is an example
|
||||||
|
of cross-compiling for the IBM 405GP PowerPC processor using the toolchain on
|
||||||
|
Linux.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#! /bin/sh
|
||||||
|
|
||||||
|
export PATH=$PATH:/opt/hardhat/devkit/ppc/405/bin
|
||||||
|
export CPPFLAGS="-I/opt/hardhat/devkit/ppc/405/target/usr/include"
|
||||||
|
export AR=ppc_405-ar
|
||||||
|
export AS=ppc_405-as
|
||||||
|
export LD=ppc_405-ld
|
||||||
|
export RANLIB=ppc_405-ranlib
|
||||||
|
export CC=ppc_405-gcc
|
||||||
|
export NM=ppc_405-nm
|
||||||
|
|
||||||
|
./configure --target=powerpc-hardhat-linux
|
||||||
|
--host=powerpc-hardhat-linux
|
||||||
|
--build=i586-pc-linux-gnu
|
||||||
|
--prefix=/opt/hardhat/devkit/ppc/405/target/usr/local
|
||||||
|
--exec-prefix=/usr/local
|
||||||
|
```
|
||||||
|
|
||||||
|
You may also need to provide a parameter like `--with-random=/dev/urandom` to
|
||||||
|
configure as it cannot detect the presence of a random number generating
|
||||||
|
device for a target system. The `--prefix` parameter specifies where curl gets
|
||||||
|
installed. If `configure` completes successfully, do `make` and `make install`
|
||||||
|
as usual.
|
||||||
|
|
||||||
|
In some cases, you may be able to simplify the above commands to as little as:
|
||||||
|
|
||||||
|
./configure --host=ARCH-OS
|
||||||
|
|
||||||
|
# REDUCING SIZE
|
||||||
|
|
||||||
|
There are a number of configure options that can be used to reduce the size of
|
||||||
|
libcurl for embedded applications where binary size is an important factor.
|
||||||
|
First, be sure to set the `CFLAGS` variable when configuring with any relevant
|
||||||
|
compiler optimization flags to reduce the size of the binary. For gcc, this
|
||||||
|
would mean at minimum the `-Os` option, and others like the following that
|
||||||
|
may be relevant in some environments: `-march=X`, `-mthumb`, `-m32`,
|
||||||
|
`-mdynamic-no-pic`, `-flto`, `-fdata-sections`, `-ffunction-sections`,
|
||||||
|
`-fno-unwind-tables`, `-fno-asynchronous-unwind-tables`,
|
||||||
|
`-fno-record-gcc-switches`, `-fsection-anchors`, `-fno-plt`,
|
||||||
|
`-Wl,--gc-sections`, `-Wl,-Bsymbolic`, `-Wl,-s`,
|
||||||
|
|
||||||
|
For example, this is how to combine a few of these options:
|
||||||
|
|
||||||
|
./configure CC=gcc CFLAGS='-Os -ffunction-sections' LDFLAGS='-Wl,--gc-sections'...
|
||||||
|
|
||||||
|
Note that newer compilers often produce smaller code than older versions
|
||||||
|
due to improved optimization.
|
||||||
|
|
||||||
|
Be sure to specify as many `--disable-` and `--without-` flags on the
|
||||||
|
configure command-line as you can to disable all the libcurl features that you
|
||||||
|
know your application is not going to need. Besides specifying the
|
||||||
|
`--disable-PROTOCOL` flags for all the types of URLs your application does not
|
||||||
|
use, here are some other flags that can reduce the size of the library by
|
||||||
|
disabling support for some feature (run `./configure --help` to see them all):
|
||||||
|
|
||||||
|
- `--disable-alt-svc` (HTTP Alt-Svc)
|
||||||
|
- `--disable-ares` (the C-ARES DNS library)
|
||||||
|
- `--disable-cookies` (HTTP cookies)
|
||||||
|
- `--disable-basic-auth` (cryptographic authentication)
|
||||||
|
- `--disable-bearer-auth` (cryptographic authentication)
|
||||||
|
- `--disable-digest-auth` (cryptographic authentication)
|
||||||
|
- `--disable-kerberos-auth` (cryptographic authentication)
|
||||||
|
- `--disable-negotiate-auth` (cryptographic authentication)
|
||||||
|
- `--disable-aws` (cryptographic authentication)
|
||||||
|
- `--disable-dateparse` (date parsing for time conditionals)
|
||||||
|
- `--disable-dnsshuffle` (internal server load spreading)
|
||||||
|
- `--disable-doh` (DNS-over-HTTP)
|
||||||
|
- `--disable-form-api` (POST form API)
|
||||||
|
- `--disable-get-easy-options` (lookup easy options at runtime)
|
||||||
|
- `--disable-headers-api` (API to access headers)
|
||||||
|
- `--disable-hsts` (HTTP Strict Transport Security)
|
||||||
|
- `--disable-http-auth` (all HTTP authentication)
|
||||||
|
- `--disable-ipv6` (IPv6)
|
||||||
|
- `--disable-libcurl-option` (--libcurl C code generation support)
|
||||||
|
- `--disable-manual` (--manual built-in documentation)
|
||||||
|
- `--disable-mime` (MIME API)
|
||||||
|
- `--disable-netrc` (.netrc file)
|
||||||
|
- `--disable-ntlm` (NTLM authentication)
|
||||||
|
- `--disable-ntlm-wb` (NTLM WinBind)
|
||||||
|
- `--disable-progress-meter` (graphical progress meter in library)
|
||||||
|
- `--disable-proxy` (HTTP and SOCKS proxies)
|
||||||
|
- `--disable-pthreads` (multi-threading)
|
||||||
|
- `--disable-socketpair` (socketpair for asynchronous name resolving)
|
||||||
|
- `--disable-threaded-resolver` (threaded name resolver)
|
||||||
|
- `--disable-tls-srp` (Secure Remote Password authentication for TLS)
|
||||||
|
- `--disable-unix-sockets` (UNIX sockets)
|
||||||
|
- `--disable-verbose` (eliminates debugging strings and error code strings)
|
||||||
|
- `--disable-versioned-symbols` (versioned symbols)
|
||||||
|
- `--enable-symbol-hiding` (eliminates unneeded symbols in the shared library)
|
||||||
|
- `--without-brotli` (Brotli on-the-fly decompression)
|
||||||
|
- `--without-libpsl` (Public Suffix List in cookies)
|
||||||
|
- `--without-nghttp2` (HTTP/2 using nghttp2)
|
||||||
|
- `--without-ngtcp2` (HTTP/2 using ngtcp2)
|
||||||
|
- `--without-zstd` (Zstd on-the-fly decompression)
|
||||||
|
- `--without-libidn2` (internationalized domain names)
|
||||||
|
- `--without-librtmp` (RTMP)
|
||||||
|
- `--without-ssl` (SSL/TLS)
|
||||||
|
- `--without-zlib` (on-the-fly decompression)
|
||||||
|
|
||||||
|
Be sure also to strip debugging symbols from your binaries after compiling
|
||||||
|
using 'strip' or an option like `-s`. If space is really tight, you may be able
|
||||||
|
to gain a few bytes by removing some unneeded sections of the shared library
|
||||||
|
using the -R option to objcopy (e.g. the .comment section).
|
||||||
|
|
||||||
|
Using these techniques it is possible to create a basic HTTP-only libcurl
|
||||||
|
shared library for i386 Linux platforms that is only 130 KiB in size
|
||||||
|
(as of libcurl version 8.6.0, using gcc 13.2.0).
|
||||||
|
|
||||||
|
You may find that statically linking libcurl to your application results in a
|
||||||
|
lower total size than dynamically linking.
|
||||||
|
|
||||||
|
The curl test harness can detect the use of some, but not all, of the
|
||||||
|
`--disable` statements suggested above. Use of these can cause tests relying
|
||||||
|
on those features to fail. The test harness can be manually forced to skip the
|
||||||
|
relevant tests by specifying certain key words on the `runtests.pl` command
|
||||||
|
line. Following is a list of appropriate key words for those configure options
|
||||||
|
that are not automatically detected:
|
||||||
|
|
||||||
|
- `--disable-cookies` !cookies
|
||||||
|
- `--disable-dateparse` !RETRY-AFTER !`CURLOPT_TIMECONDITION` !`CURLINFO_FILETIME` !`If-Modified-Since` !`curl_getdate` !`-z`
|
||||||
|
- `--disable-libcurl-option` !`--libcurl`
|
||||||
|
- `--disable-verbose` !verbose\ logs
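For example, a build configured with `--disable-cookies` and `--disable-verbose` could be tested like this (an illustrative invocation only; adjust the keywords to the options you actually disabled):

```
./runtests.pl '!cookies' '!verbose logs'
```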
|
||||||
|
|
||||||
|
# Ports
|
||||||
|
|
||||||
|
This is a probably incomplete list of known CPU architectures and operating
|
||||||
|
systems that curl has been compiled for. If you know a system curl compiles
|
||||||
|
and runs on that is not listed, please let us know!
|
||||||
|
|
||||||
|
## 101 Operating Systems
|
||||||
|
|
||||||
|
AIX, AmigaOS, Android, ArcaOS, Aros, Atari FreeMiNT, BeOS, Blackberry 10,
|
||||||
|
Blackberry Tablet OS, Cell OS, CheriBSD, Chrome OS, Cisco IOS, DG/UX,
|
||||||
|
Dragonfly BSD, DR DOS, eCOS, FreeBSD, FreeDOS, FreeRTOS, Fuchsia, Garmin OS,
|
||||||
|
Genode, Haiku, HardenedBSD, HP-UX, Hurd, Illumos, Integrity, iOS, ipadOS, IRIX,
|
||||||
|
Linux, Lua RTOS, Mac OS 9, macOS, Mbed, Meego, Micrium, MINIX, Moblin, MorphOS,
|
||||||
|
MPE/iX, MS-DOS, NCR MP-RAS, NetBSD, Netware, NextStep, Nintendo Switch,
|
||||||
|
NonStop OS, NuttX, OpenBSD, OpenStep, Orbis OS, OS/2, OS/400, OS21, Plan 9,
|
||||||
|
PlayStation Portable, QNX, Qubes OS, ReactOS, Redox, RISC OS, ROS, RTEMS,
|
||||||
|
Sailfish OS, SCO Unix, Serenity, SINIX-Z, SkyOS, Solaris, Sortix, SunOS,
|
||||||
|
Syllable OS, Symbian, Tizen, TPF, Tru64, tvOS, ucLinux, Ultrix, UNICOS,
|
||||||
|
UnixWare, VMS, vxWorks, watchOS, Wear OS, WebOS, Wii system software, Wii U,
|
||||||
|
Windows, Windows CE, Xbox System, Xenix, Zephyr, z/OS, z/TPF, z/VM, z/VSE
|
||||||
|
|
||||||
|
## 28 CPU Architectures
|
||||||
|
|
||||||
|
Alpha, ARC, ARM, AVR32, C-SKY, CompactRISC, Elbrus, ETRAX, HP-PA, Itanium,
|
||||||
|
LoongArch, m68k, m88k, MicroBlaze, MIPS, Nios, OpenRISC, POWER, PowerPC,
|
||||||
|
RISC-V, s390, SH4, SPARC, Tilera, VAX, x86, Xtensa, z/arch
|
61
src/dependencies/curl-8.8.0/docs/INTERNALS.md
Normal file
61
src/dependencies/curl-8.8.0/docs/INTERNALS.md
Normal file
|
@ -0,0 +1,61 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# curl internals
|
||||||
|
|
||||||
|
The canonical libcurl internals documentation is now in the [everything
|
||||||
|
curl](https://everything.curl.dev/internals) book. This file lists supported
|
||||||
|
versions of libs and build tools.
|
||||||
|
|
||||||
|
## Portability
|
||||||
|
|
||||||
|
We write curl and libcurl to compile with C89 compilers on 32-bit and up
|
||||||
|
machines. Most of libcurl assumes more or less POSIX compliance but that is
|
||||||
|
not a requirement.
|
||||||
|
|
||||||
|
We write libcurl to build and work with lots of third party tools, and we
|
||||||
|
want it to remain functional and buildable with these and later versions
|
||||||
|
(older versions may still work but that is not what we work hard to maintain):
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
We aim to support these or later versions.
|
||||||
|
|
||||||
|
- OpenSSL 0.9.7
|
||||||
|
- GnuTLS 3.1.10
|
||||||
|
- zlib 1.1.4
|
||||||
|
- libssh2 1.0
|
||||||
|
- c-ares 1.16.0
|
||||||
|
- libidn2 2.0.0
|
||||||
|
- wolfSSL 2.0.0
|
||||||
|
- OpenLDAP 2.0
|
||||||
|
- MIT Kerberos 1.2.4
|
||||||
|
- Heimdal ?
|
||||||
|
- nghttp2 1.15.0
|
||||||
|
- WinSock 2.2 (on Windows 95+ and Windows CE .NET 4.1+)
|
||||||
|
|
||||||
|
## Build tools
|
||||||
|
|
||||||
|
When writing code (mostly for generating stuff included in release tarballs)
|
||||||
|
we use a few "build tools" and we make sure that we remain functional with
|
||||||
|
these versions:
|
||||||
|
|
||||||
|
- GNU Libtool 1.4.2
|
||||||
|
- GNU Autoconf 2.59
|
||||||
|
- GNU Automake 1.7
|
||||||
|
- GNU M4 1.4
|
||||||
|
- perl 5.6
|
||||||
|
- roffit 0.5
|
||||||
|
- cmake 3.7
|
||||||
|
|
||||||
|
Library Symbols
|
||||||
|
===============
|
||||||
|
|
||||||
|
All symbols used internally in libcurl must use a `Curl_` prefix if they are
|
||||||
|
used in more than a single file. Single-file symbols must be made static.
|
||||||
|
Public ("exported") symbols must use a `curl_` prefix. Public API functions
|
||||||
|
are marked with `CURL_EXTERN` in the public header files so that all others
|
||||||
|
can be hidden on platforms where this is possible.
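One way to sanity-check this on ELF platforms (a sketch assuming a Linux toolchain with `nm` and `awk`; this is not an official project tool) is to list the defined dynamic symbols of the built library and verify that only `curl_`-prefixed functions are exported:

```
nm -D --defined-only libcurl.so | awk '$2 == "T" && $3 !~ /^curl_/'
```

If symbol hiding works as intended, the command prints nothing.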
|
133
src/dependencies/curl-8.8.0/docs/IPFS.md
Normal file
133
src/dependencies/curl-8.8.0/docs/IPFS.md
Normal file
|
@ -0,0 +1,133 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# IPFS
|
||||||
|
For an overview about IPFS, visit the [IPFS project site](https://ipfs.tech/).
|
||||||
|
|
||||||
|
In IPFS there are two protocols: IPFS and IPNS (their workings are explained in detail [here](https://docs.ipfs.tech/concepts/)). The ideal way to access data on the IPFS network is through those protocols. For example, the ideal way to access the Big Buck Bunny video is: `ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi`
|
||||||
|
|
||||||
|
## IPFS Gateways
|
||||||
|
|
||||||
|
An IPFS gateway acts as a bridge between traditional HTTP clients and IPFS.
|
||||||
|
The IPFS gateway specifications of HTTP semantics can be found [here](https://specs.ipfs.tech/http-gateways/).
|
||||||
|
|
||||||
|
### Deserialized responses
|
||||||
|
|
||||||
|
By default, a gateway acts as a bridge between traditional HTTP clients and IPFS and performs necessary hash verification and deserialization. Through such gateway, users can download files, directories, and other content-addressed data stored with IPFS or IPNS as if they were stored in a traditional web server.
|
||||||
|
|
||||||
|
### Verifiable responses
|
||||||
|
|
||||||
|
By explicitly requesting [application/vnd.ipld.raw](https://www.iana.org/assignments/media-types/application/vnd.ipld.raw) or [application/vnd.ipld.car](https://www.iana.org/assignments/media-types/application/vnd.ipld.car) responses, by means defined in [Trustless Gateway Specification](https://specs.ipfs.tech/http-gateways/trustless-gateway/), the user is able to fetch raw content-addressed data and [perform hash verification themselves](https://docs.ipfs.tech/reference/http/gateway/#trustless-verifiable-retrieval).
|
||||||
|
|
||||||
|
This enables users to use untrusted, public gateways without worrying they might return invalid/malicious bytes.
|
||||||
|
|
||||||
|
## IPFS and IPNS protocol handling
|
||||||
|
|
||||||
|
There are various ways to access data from the IPFS network. One such way is
|
||||||
|
through the concept of public
|
||||||
|
"[gateways](https://docs.ipfs.tech/concepts/ipfs-gateway/#overview)". The
|
||||||
|
short version is that entities can offer gateway services. Examples here
|
||||||
|
that are hosted by Protocol Labs (who also makes IPFS) are `dweb.link` and
|
||||||
|
`ipfs.io`. Both sites expose gateway functionality. Getting a file through
|
||||||
|
`ipfs.io` looks like this:
|
||||||
|
`https://ipfs.io/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi`
|
||||||
|
|
||||||
|
If you were to be [running your own IPFS
|
||||||
|
node](https://docs.ipfs.tech/how-to/command-line-quick-start/) then you, by
|
||||||
|
default, also have a [local gateway](https://specs.ipfs.tech/http-gateways/)
|
||||||
|
running. In its default configuration the earlier example would then also work
|
||||||
|
in this link:
|
||||||
|
|
||||||
|
`http://127.0.0.1:8080/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi`
|
||||||
|
|
||||||
|
## cURL handling of the IPFS protocols
|
||||||
|
|
||||||
|
The IPFS integration in cURL hides this gateway logic for you. Instead of
|
||||||
|
providing a full URL to a file on IPFS like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
curl http://127.0.0.1:8080/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
|
||||||
|
```
|
||||||
|
|
||||||
|
You can provide it with the IPFS protocol instead:
|
||||||
|
```
|
||||||
|
curl ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
|
||||||
|
```
|
||||||
|
|
||||||
|
With the IPFS protocol way of asking for a file, cURL still needs to know the
|
||||||
|
gateway. curl essentially just rewrites the IPFS based URL to a gateway URL.
|
||||||
|
|
||||||
|
### IPFS_GATEWAY environment variable
|
||||||
|
|
||||||
|
If the `IPFS_GATEWAY` environment variable is found, its value is used as
|
||||||
|
gateway.
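For example (assuming a gateway listening on the default local address used elsewhere in this document; adjust it to your setup):

```
export IPFS_GATEWAY=http://127.0.0.1:8080
curl ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
```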
|
||||||
|
|
||||||
|
### Automatic gateway detection
|
||||||
|
|
||||||
|
When you provide no additional details to cURL, it does the following (see the example after this list):
|
||||||
|
|
||||||
|
1. First looks for the `IPFS_GATEWAY` environment variable and uses that if it
|
||||||
|
is set.
|
||||||
|
2. Looks for the file: `~/.ipfs/gateway`. If it can find that file then it
|
||||||
|
means that you have a local gateway running and that file contains the URL
|
||||||
|
to your local gateway.
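A minimal sketch of both detection paths (assuming a local node; as described further down, the file simply contains the URL of your gateway):

```
# 1. explicit environment variable
export IPFS_GATEWAY=http://127.0.0.1:8080

# 2. or rely on a local node that exposes its gateway URL in a file
cat ~/.ipfs/gateway
```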
|
||||||
|
|
||||||
|
If cURL fails, you are presented with an error message and a link to this page
|
||||||
|
pointing to the option most applicable to solving the issue.
|
||||||
|
|
||||||
|
### `--ipfs-gateway` argument
|
||||||
|
|
||||||
|
You can also provide a `--ipfs-gateway` argument to cURL. This overrules any
|
||||||
|
other gateway setting. curl does not fall back to the other options if the
|
||||||
|
provided gateway does not work.
|
||||||
|
|
||||||
|
## Gateway redirects
|
||||||
|
|
||||||
|
A gateway could redirect to another place. For example, `dweb.link` redirects
|
||||||
|
[path based](https://docs.ipfs.tech/how-to/address-ipfs-on-web/#path-gateway)
|
||||||
|
requests to [subdomain
|
||||||
|
based](https://docs.ipfs.tech/how-to/address-ipfs-on-web/#subdomain-gateway)
|
||||||
|
ones. A request using:
|
||||||
|
|
||||||
|
curl ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi --ipfs-gateway https://dweb.link
|
||||||
|
|
||||||
|
would be translated to:
|
||||||
|
|
||||||
|
https://dweb.link/ipfs/bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi
|
||||||
|
|
||||||
|
which then redirects to:
|
||||||
|
|
||||||
|
https://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi.ipfs.dweb.link
|
||||||
|
|
||||||
|
If you trust this behavior from your gateway of choice then passing the `-L`
|
||||||
|
option follows the redirect.
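For example (same gateway and CID as above):

```
curl -L ipfs://bafybeigagd5nmnn2iys2f3doro7ydrevyr2mzarwidgadawmamiteydbzi --ipfs-gateway https://dweb.link
```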
|
||||||
|
|
||||||
|
## Error messages and hints
|
||||||
|
|
||||||
|
Depending on the arguments, cURL could present the user with an error.
|
||||||
|
|
||||||
|
### Gateway file and environment variable
|
||||||
|
|
||||||
|
cURL tried to look for the file: `~/.ipfs/gateway` but could not find it. It
|
||||||
|
also tried to look for the `IPFS_GATEWAY` environment variable but could not
|
||||||
|
find that either. This happens when no extra arguments are passed to cURL and
|
||||||
|
it is left to figure it out [automatically](#automatic-gateway-detection).
|
||||||
|
|
||||||
|
Any IPFS implementation that has gateway support should expose its URL in
|
||||||
|
`~/.ipfs/gateway`. If you are already running a gateway, make sure it exposes
|
||||||
|
the file where cURL expects to find it.
|
||||||
|
|
||||||
|
Alternatively you could set the `IPFS_GATEWAY` environment variable or pass
|
||||||
|
the `--ipfs-gateway` flag to the cURL command.
|
||||||
|
|
||||||
|
### Malformed gateway URL
|
||||||
|
|
||||||
|
The command executed results in an invalid URL. This could be anywhere in
|
||||||
|
the URL, but a likely point is a wrong gateway URL.
|
||||||
|
|
||||||
|
Inspect the URL set via the `IPFS_GATEWAY` environment variable or passed with
|
||||||
|
the `--ipfs-gateway` flag. Alternatively opt to go for the
|
||||||
|
[automatic](#automatic-gateway-detection) gateway detection.
|
658
src/dependencies/curl-8.8.0/docs/KNOWN_BUGS
Normal file
658
src/dependencies/curl-8.8.0/docs/KNOWN_BUGS
Normal file
|
@ -0,0 +1,658 @@
|
||||||
|
_ _ ____ _
|
||||||
|
___| | | | _ \| |
|
||||||
|
/ __| | | | |_) | |
|
||||||
|
| (__| |_| | _ <| |___
|
||||||
|
\___|\___/|_| \_\_____|
|
||||||
|
|
||||||
|
Known Bugs
|
||||||
|
|
||||||
|
These are problems and bugs known to exist at the time of this release. Feel
|
||||||
|
free to join in and help us correct one or more of these. Also be sure to
|
||||||
|
check the changelog of the current development status, as one or more of these
|
||||||
|
problems may have been fixed or changed somewhat since this was written.
|
||||||
|
|
||||||
|
1. HTTP
|
||||||
|
1.2 hyper is slow
|
||||||
|
1.5 Expect-100 meets 417
|
||||||
|
|
||||||
|
2. TLS
|
||||||
|
2.1 IMAPS connection fails with rustls error
|
||||||
|
2.3 Unable to use PKCS12 certificate with Secure Transport
|
||||||
|
2.4 Secure Transport will not import PKCS#12 client certificates without a password
|
||||||
|
2.5 Client cert handling with Issuer DN differs between backends
|
||||||
|
2.7 Client cert (MTLS) issues with Schannel
|
||||||
|
2.11 Schannel TLS 1.2 handshake bug in old Windows versions
|
||||||
|
2.13 CURLOPT_CERTINFO results in CURLE_OUT_OF_MEMORY with Schannel
|
||||||
|
|
||||||
|
3. Email protocols
|
||||||
|
3.1 IMAP SEARCH ALL truncated response
|
||||||
|
3.2 No disconnect command
|
||||||
|
3.3 POP3 expects "CRLF.CRLF" eob for some single-line responses
|
||||||
|
3.4 AUTH PLAIN for SMTP is not working on all servers
|
||||||
|
3.5 APOP authentication fails on POP3
|
||||||
|
3.6 POP3 issue when reading small chunks
|
||||||
|
|
||||||
|
4. Command line
|
||||||
|
|
||||||
|
5. Build and portability issues
|
||||||
|
5.1 OS400 port requires deprecated IBM library
|
||||||
|
5.2 curl-config --libs contains private details
|
||||||
|
5.3 building for old macOS fails with gcc
|
||||||
|
5.5 cannot handle Unicode arguments in non-Unicode builds on Windows
|
||||||
|
5.6 cygwin: make install installs curl-config.1 twice
|
||||||
|
5.9 Utilize Requires.private directives in libcurl.pc
|
||||||
|
5.11 configure --with-gssapi with Heimdal is ignored on macOS
|
||||||
|
5.12 flaky CI builds
|
||||||
|
5.13 long paths are not fully supported on Windows
|
||||||
|
5.14 Windows Unicode builds use homedir in current locale
|
||||||
|
5.15 Unicode on Windows
|
||||||
|
|
||||||
|
6. Authentication
|
||||||
|
6.1 NTLM authentication and unicode
|
||||||
|
6.2 MIT Kerberos for Windows build
|
||||||
|
6.3 NTLM in system context uses wrong name
|
||||||
|
6.5 NTLM does not support password with § character
|
||||||
|
6.6 libcurl can fail to try alternatives with --proxy-any
|
||||||
|
6.7 Do not clear digest for single realm
|
||||||
|
6.9 SHA-256 digest not supported in Windows SSPI builds
|
||||||
|
6.10 curl never completes Negotiate over HTTP
|
||||||
|
6.11 Negotiate on Windows fails
|
||||||
|
6.12 cannot use Secure Transport with Crypto Token Kit
|
||||||
|
6.13 Negotiate against Hadoop HDFS
|
||||||
|
|
||||||
|
7. FTP
|
||||||
|
7.1 FTP upload fails if remembered dir is deleted
|
||||||
|
7.2 Implicit FTPS upload timeout
|
||||||
|
7.3 FTP with NOBODY and FAILONERROR
|
||||||
|
7.4 FTP with ACCT
|
||||||
|
7.5 FTPS upload, FileZilla, GnuTLS and close_notify
|
||||||
|
7.11 FTPS upload data loss with TLS 1.3
|
||||||
|
7.12 FTPS server compatibility on Windows with Schannel
|
||||||
|
|
||||||
|
9. SFTP and SCP
|
||||||
|
9.1 SFTP does not do CURLOPT_POSTQUOTE correct
|
||||||
|
9.2 wolfssh: publickey auth does not work
|
||||||
|
9.3 Remote recursive folder creation with SFTP
|
||||||
|
9.4 libssh blocking and infinite loop problem
|
||||||
|
9.5 cygwin: "WARNING: UNPROTECTED PRIVATE KEY FILE!"
|
||||||
|
|
||||||
|
10. SOCKS
|
||||||
|
10.3 FTPS over SOCKS
|
||||||
|
|
||||||
|
11. Internals
|
||||||
|
11.1 gssapi library name + version is missing in curl_version_info()
|
||||||
|
11.2 error buffer not set if connection to multiple addresses fails
|
||||||
|
11.4 HTTP test server 'connection-monitor' problems
|
||||||
|
11.5 Connection information when using TCP Fast Open
|
||||||
|
|
||||||
|
12. LDAP
|
||||||
|
12.1 OpenLDAP hangs after returning results
|
||||||
|
12.2 LDAP on Windows does authentication wrong?
|
||||||
|
12.3 LDAP on Windows does not work
|
||||||
|
12.4 LDAPS requests to ActiveDirectory server hang
|
||||||
|
|
||||||
|
13. TCP/IP
|
||||||
|
13.2 Trying local ports fails on Windows
|
||||||
|
|
||||||
|
15. CMake
|
||||||
|
15.1 cmake outputs: no version information available
|
||||||
|
15.2 support build with GnuTLS
|
||||||
|
15.3 unusable tool_hugehelp.c with MinGW
|
||||||
|
15.6 uses -lpthread instead of Threads::Threads
|
||||||
|
15.7 generated .pc file contains strange entries
|
||||||
|
15.11 ExternalProject_Add does not set CURL_CA_PATH
|
||||||
|
15.13 CMake build with MIT Kerberos does not work
|
||||||
|
|
||||||
|
16. aws-sigv4
|
||||||
|
16.1 aws-sigv4 does not sign requests with * correctly
|
||||||
|
16.6 aws-sigv4 does not behave well with AWS VPC Lattice
|
||||||
|
|
||||||
|
17. HTTP/2
|
||||||
|
17.1 HTTP/2 prior knowledge over proxy
|
||||||
|
17.2 HTTP/2 frames while in the connection pool kill reuse
|
||||||
|
17.3 ENHANCE_YOUR_CALM causes infinite retries
|
||||||
|
|
||||||
|
18. HTTP/3
|
||||||
|
18.1 connection migration does not work
|
||||||
|
|
||||||
|
19. RTSP
|
||||||
|
19.1 Some methods do not support response bodies
|
||||||
|
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
1. HTTP
|
||||||
|
|
||||||
|
1.2 hyper is slow
|
||||||
|
|
||||||
|
When curl is built to use hyper for HTTP, it is unnecessarily slow.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11203
|
||||||
|
|
||||||
|
1.5 Expect-100 meets 417
|
||||||
|
|
||||||
|
If an upload using Expect: 100-continue receives an HTTP 417 response, it
|
||||||
|
ought to be automatically resent without the Expect:. A workaround is for
|
||||||
|
the client application to redo the transfer after disabling Expect:.
|
||||||
|
https://curl.se/mail/archive-2008-02/0043.html
|
||||||
|
|
||||||
|
2. TLS
|
||||||
|
|
||||||
|
2.1 IMAPS connection fails with rustls error
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/10457
|
||||||
|
|
||||||
|
2.3 Unable to use PKCS12 certificate with Secure Transport
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/5403
|
||||||
|
|
||||||
|
2.4 Secure Transport will not import PKCS#12 client certificates without a password
|
||||||
|
|
||||||
|
libcurl calls SecPKCS12Import with the PKCS#12 client certificate, but that
|
||||||
|
function rejects certificates that do not have a password.
|
||||||
|
https://github.com/curl/curl/issues/1308
|
||||||
|
|
||||||
|
2.5 Client cert handling with Issuer DN differs between backends
|
||||||
|
|
||||||
|
When the specified client certificate does not match any of the
|
||||||
|
server-specified DNs, the OpenSSL and GnuTLS backends behave differently.
|
||||||
|
The github discussion may contain a solution.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/1411
|
||||||
|
|
||||||
|
2.7 Client cert (MTLS) issues with Schannel
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/3145
|
||||||
|
|
||||||
|
2.11 Schannel TLS 1.2 handshake bug in old Windows versions
|
||||||
|
|
||||||
|
In old versions of Windows such as 7 and 8.1 the Schannel TLS 1.2 handshake
|
||||||
|
implementation likely has a bug that can rarely cause the key exchange to
|
||||||
|
fail, resulting in error SEC_E_BUFFER_TOO_SMALL or SEC_E_MESSAGE_ALTERED.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/5488
|
||||||
|
|
||||||
|
2.13 CURLOPT_CERTINFO results in CURLE_OUT_OF_MEMORY with Schannel
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/8741
|
||||||
|
|
||||||
|
3. Email protocols
|
||||||
|
|
||||||
|
3.1 IMAP SEARCH ALL truncated response
|
||||||
|
|
||||||
|
IMAP "SEARCH ALL" truncates output on large boxes. "A quick search of the
|
||||||
|
code reveals that pingpong.c contains some truncation code, at line 408, when
|
||||||
|
it deems the server response to be too large truncating it to 40 characters"
|
||||||
|
https://curl.se/bug/view.cgi?id=1366
|
||||||
|
|
||||||
|
3.2 No disconnect command
|
||||||
|
|
||||||
|
The disconnect commands (LOGOUT and QUIT) may not be sent by IMAP, POP3 and
|
||||||
|
SMTP if a failure occurs during the authentication phase of a connection.
|
||||||
|
|
||||||
|
3.3 POP3 expects "CRLF.CRLF" eob for some single-line responses
|
||||||
|
|
||||||
|
You have to tell libcurl not to expect a body, when dealing with one line
|
||||||
|
response commands. Please see the POP3 examples and test cases which show
|
||||||
|
this for the NOOP and DELE commands. https://curl.se/bug/?i=740
|
||||||
|
|
||||||
|
3.4 AUTH PLAIN for SMTP is not working on all servers
|
||||||
|
|
||||||
|
Specifying "--login-options AUTH=PLAIN" on the command line does not seem to
|
||||||
|
work correctly.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/4080
|
||||||
|
|
||||||
|
3.5 APOP authentication fails on POP3
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/10073
|
||||||
|
|
||||||
|
3.6 POP3 issue when reading small chunks
|
||||||
|
|
||||||
|
CURL_DBG_SOCK_RMAX=4 ./runtests.pl -v 982
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/12063
|
||||||
|
|
||||||
|
4. Command line
|
||||||
|
|
||||||
|
5. Build and portability issues
|
||||||
|
|
||||||
|
5.1 OS400 port requires deprecated IBM library
|
||||||
|
|
||||||
|
curl for OS400 requires QADRT to build, which provides ASCII wrappers for
|
||||||
|
libc/POSIX functions in the ILE, but IBM no longer supports or even offers
|
||||||
|
this library to download.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/5176
|
||||||
|
|
||||||
|
5.2 curl-config --libs contains private details
|
||||||
|
|
||||||
|
"curl-config --libs" will include details set in LDFLAGS when configure is
|
||||||
|
run that might be needed only for building libcurl. Further, curl-config
|
||||||
|
--cflags suffers from the same effects with CFLAGS/CPPFLAGS.
|
||||||
|
|
||||||
|
5.3 building for old macOS fails with gcc
|
||||||
|
|
||||||
|
Building curl for certain old macOS versions fails when gcc is used. We
|
||||||
|
recommend using clang in those cases.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/11441
|
||||||
|
|
||||||
|
5.5 cannot handle Unicode arguments in non-Unicode builds on Windows
|
||||||
|
|
||||||
|
If a URL or filename cannot be encoded using the user's current codepage then
|
||||||
|
it can only be encoded properly in the Unicode character set. Windows uses
|
||||||
|
UTF-16 encoding for Unicode and stores it in wide characters, however curl
|
||||||
|
and libcurl are not equipped for that at the moment except when built with
|
||||||
|
_UNICODE and UNICODE defined. And, except for Cygwin, Windows cannot use UTF-8
|
||||||
|
as a locale.
|
||||||
|
|
||||||
|
https://curl.se/bug/?i=345
|
||||||
|
https://curl.se/bug/?i=731
|
||||||
|
https://curl.se/bug/?i=3747
|
||||||
|
|
||||||
|
5.6 cygwin: make install installs curl-config.1 twice
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/8839
|
||||||
|
|
||||||
|
5.9 Utilize Requires.private directives in libcurl.pc
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/864
|
||||||
|
|
||||||
|
5.11 configure --with-gssapi with Heimdal is ignored on macOS
|
||||||
|
|
||||||
|
... unless you also pass --with-gssapi-libs
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/3841
|
||||||
|
|
||||||
|
5.12 flaky CI builds
|
||||||
|
|
||||||
|
We run many CI builds for each commit and PR on github, and especially a
|
||||||
|
number of the Windows builds are flaky. This means that we rarely get all CI
|
||||||
|
builds go green and complete without errors. This is unfortunate as it makes
|
||||||
|
us sometimes miss actual build problems and it is surprising to newcomers to
|
||||||
|
the project who (rightfully) do not expect this.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/6972
|
||||||
|
|
||||||
|
5.13 long paths are not fully supported on Windows
|
||||||
|
|
||||||
|
curl on Windows cannot access long paths (paths longer than 260 characters).
|
||||||
|
However, as a workaround, the Windows path prefix \\?\ which disables all path
|
||||||
|
interpretation may work to allow curl to access the path. For example:
|
||||||
|
\\?\c:\longpath.
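An illustrative invocation (hypothetical path, shown only to demonstrate the prefix):

  curl -o \\?\C:\some\very\long\path\output.bin https://example.com/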
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/8361
|
||||||
|
|
||||||
|
5.14 Windows Unicode builds use homedir in current locale
|
||||||
|
|
||||||
|
The Windows Unicode builds of curl use the current locale, but expect Unicode
|
||||||
|
UTF-8 encoded paths for internal use such as open, access and stat. The user's
|
||||||
|
home directory is retrieved via curl_getenv in the current locale and not as
|
||||||
|
UTF-8 encoded Unicode.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/pull/7252 and
|
||||||
|
https://github.com/curl/curl/pull/7281
|
||||||
|
|
||||||
|
5.15 Unicode on Windows
|
||||||
|
|
||||||
|
Passing in a unicode filename with -o:
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11461
|
||||||
|
|
||||||
|
Passing in unicode character with -d:
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/12231
|
||||||
|
|
||||||
|
6. Authentication
|
||||||
|
|
||||||
|
6.1 NTLM authentication and unicode
|
||||||
|
|
||||||
|
NTLM authentication involving unicode user name or password only works
|
||||||
|
properly if built with UNICODE defined together with the Schannel
|
||||||
|
backend. The original problem was mentioned in:
|
||||||
|
https://curl.se/mail/lib-2009-10/0024.html
|
||||||
|
https://curl.se/bug/view.cgi?id=896
|
||||||
|
|
||||||
|
The Schannel version verified to work as mentioned in
|
||||||
|
https://curl.se/mail/lib-2012-07/0073.html
|
||||||
|
|
||||||
|
6.2 MIT Kerberos for Windows build
|
||||||
|
|
||||||
|
libcurl fails to build with MIT Kerberos for Windows (KfW) due to KfW's
|
||||||
|
library header files exporting symbols/macros that should be kept private to
|
||||||
|
the KfW library. See ticket #5601 at https://krbdev.mit.edu/rt/
|
||||||
|
|
||||||
|
6.3 NTLM in system context uses wrong name
|
||||||
|
|
||||||
|
NTLM authentication using SSPI (on Windows) when (lib)curl is running in
|
||||||
|
"system context" will make it use wrong(?) user name - at least when compared
|
||||||
|
to what winhttp does. See https://curl.se/bug/view.cgi?id=535
|
||||||
|
|
||||||
|
6.5 NTLM does not support password with § character
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/2120
|
||||||
|
|
||||||
|
6.6 libcurl can fail to try alternatives with --proxy-any
|
||||||
|
|
||||||
|
When connecting via a proxy using --proxy-any, a failure to establish an
|
||||||
|
authentication will cause libcurl to abort trying other options if the
|
||||||
|
failed method has a higher preference than the alternatives. As an example,
|
||||||
|
--proxy-any against a proxy which advertise Negotiate and NTLM, but which
|
||||||
|
fails to set up Kerberos authentication will not proceed to try authentication
|
||||||
|
using NTLM.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/876
|
||||||
|
|
||||||
|
6.7 Do not clear digest for single realm
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/3267
|
||||||
|
|
||||||
|
6.9 SHA-256 digest not supported in Windows SSPI builds
|
||||||
|
|
||||||
|
Windows builds of curl that have SSPI enabled use the native Windows API calls
|
||||||
|
to create authentication strings. The call to InitializeSecurityContext fails
|
||||||
|
with SEC_E_QOP_NOT_SUPPORTED which causes curl to fail with CURLE_AUTH_ERROR.
|
||||||
|
|
||||||
|
Microsoft does not document supported digest algorithms and that SEC_E error
|
||||||
|
code is not a documented error for InitializeSecurityContext (digest).
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/6302
|
||||||
|
|
||||||
|
6.10 curl never completes Negotiate over HTTP
|
||||||
|
|
||||||
|
Apparently it is not working correctly...?
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/5235
|
||||||
|
|
||||||
|
6.11 Negotiate on Windows fails
|
||||||
|
|
||||||
|
When using --negotiate (or NTLM) with curl on Windows, SSL/TLS handshake
|
||||||
|
fails despite having a valid kerberos ticket cached. Works without any issue
|
||||||
|
in Unix/Linux.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/5881
|
||||||
|
|
||||||
|
6.12 cannot use Secure Transport with Crypto Token Kit
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/7048
|
||||||
|
|
||||||
|
6.13 Negotiate authentication against Hadoop HDFS
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/8264
|
||||||
|
|
||||||
|
7. FTP
|
||||||
|
|
||||||
|
7.1 FTP upload fails if remembered dir is deleted
|
||||||
|
|
||||||
|
curl's FTP code assumes that the directory it entered in a previous transfer
|
||||||
|
still exists when it comes back to do a second transfer, and does not respond
|
||||||
|
well if it was indeed deleted in the meantime.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/12181
|
||||||
|
|
||||||
|
7.2 Implicit FTPS upload timeout
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11720
|
||||||
|
|
||||||
|
7.3 FTP with NOBODY and FAILONERROR
|
||||||
|
|
||||||
|
It seems sensible to be able to use CURLOPT_NOBODY and CURLOPT_FAILONERROR
|
||||||
|
with FTP to detect if a file exists or not, but it is not working:
|
||||||
|
https://curl.se/mail/lib-2008-07/0295.html
|
||||||
|
|
||||||
|
7.4 FTP with ACCT
|
||||||
|
|
||||||
|
When doing an operation over FTP that requires the ACCT command (but not when
|
||||||
|
logging in), the operation will fail since libcurl does not detect this and
|
||||||
|
thus fails to issue the correct command:
|
||||||
|
https://curl.se/bug/view.cgi?id=635
|
||||||
|
|
||||||
|
7.5 FTPS upload, FileZilla, GnuTLS and close_notify
|
||||||
|
|
||||||
|
An issue where curl does not send the TLS alert close_notify, which triggers
|
||||||
|
the wrath of GnuTLS in FileZilla server, and a FTP reply 426 ECONNABORTED.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11383
|
||||||
|
|
||||||
|
7.11 FTPS upload data loss with TLS 1.3
|
||||||
|
|
||||||
|
During FTPS upload curl does not attempt to read TLS handshake messages sent
|
||||||
|
after the initial handshake. OpenSSL servers running TLS 1.3 may send such a
|
||||||
|
message. When curl closes the upload connection if unread data has been
|
||||||
|
received (such as a TLS handshake message) then the TCP protocol sends an
|
||||||
|
RST to the server, which may cause the server to discard or truncate the
|
||||||
|
upload if it has not read all sent data yet, and then return an error to curl
|
||||||
|
on the control channel connection.
|
||||||
|
|
||||||
|
Since 7.78.0 this is mostly fixed. curl will do a single read before closing
|
||||||
|
TLS connections (which causes the TLS library to read handshake messages),
|
||||||
|
however there is still possibility of an RST if more messages need to be read
|
||||||
|
or a message arrives after the read but before close (network race condition).
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/6149
|
||||||
|
|
||||||
|
7.12 FTPS server compatibility on Windows with Schannel
|
||||||
|
|
||||||
|
FTPS is not widely used with the Schannel TLS backend and so there may be more
|
||||||
|
bugs compared to other TLS backends such as OpenSSL. In the past users have
|
||||||
|
reported hanging and failed connections. It's very likely some changes to curl
|
||||||
|
since then fixed the issues. None of the reported issues can be reproduced any
|
||||||
|
longer.
|
||||||
|
|
||||||
|
If you encounter an issue connecting to your server via FTPS with the latest
|
||||||
|
curl and Schannel then please search for open issues or file a new issue.
|
||||||
|
|
||||||
|
9. SFTP and SCP
|
||||||
|
|
||||||
|
9.1 SFTP does not do CURLOPT_POSTQUOTE correct
|
||||||
|
|
||||||
|
When libcurl sends CURLOPT_POSTQUOTE commands when connected to a SFTP server
|
||||||
|
using the multi interface, the commands are not being sent correctly and
|
||||||
|
instead the connection is "cancelled" (the operation is considered done)
|
||||||
|
prematurely. There is a half-baked (busy-looping) patch provided in the bug
|
||||||
|
report but it cannot be accepted as-is. See
|
||||||
|
https://curl.se/bug/view.cgi?id=748
|
||||||
|
|
||||||
|
9.2 wolfssh: publickey auth does not work
|
||||||
|
|
||||||
|
When building curl to use the wolfSSH backend for SFTP, the publickey
|
||||||
|
authentication does not work. This is simply functionality not written for curl
|
||||||
|
yet; the necessary API to make this work is provided by wolfSSH.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/4820
|
||||||
|
|
||||||
|
9.3 Remote recursive folder creation with SFTP
|
||||||
|
|
||||||
|
On some servers, curl fails to create directories on the remote server
|
||||||
|
even when the CURLOPT_FTP_CREATE_MISSING_DIRS option is set.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/5204
|
||||||
|
|
||||||
|
9.4 libssh blocking and infinite loop problem
|
||||||
|
|
||||||
|
In the SSH_SFTP_INIT state for libssh, the ssh session working mode is set to
|
||||||
|
blocking mode. If the network is suddenly disconnected during sftp
|
||||||
|
transmission, curl will be stuck, even if curl is configured with a timeout.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/8632
|
||||||
|
|
||||||
|
9.5 cygwin: "WARNING: UNPROTECTED PRIVATE KEY FILE!"
|
||||||
|
|
||||||
|
Running SCP and SFTP tests on cygwin makes this warning message appear.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11244
|
||||||
|
|
||||||
|
10. SOCKS
|
||||||
|
|
||||||
|
10.3 FTPS over SOCKS
|
||||||
|
|
||||||
|
libcurl does not support FTPS over a SOCKS proxy.
|
||||||
|
|
||||||
|
|
||||||
|
11. Internals
|
||||||
|
|
||||||
|
11.1 gssapi library name + version is missing in curl_version_info()
|
||||||
|
|
||||||
|
The struct needs to be expanded and code added to store this info.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/13492
|
||||||
|
|
||||||
|
11.2 error buffer not set if connection to multiple addresses fails
|
||||||
|
|
||||||
|
If you ask libcurl to resolve a hostname like example.com to IPv6 addresses
|
||||||
|
only, but you only have IPv4 connectivity, libcurl correctly fails with
|
||||||
|
CURLE_COULDNT_CONNECT. But the error buffer set by CURLOPT_ERRORBUFFER
|
||||||
|
remains empty. Issue: https://github.com/curl/curl/issues/544
|
||||||
|
|
||||||
|
11.4 HTTP test server 'connection-monitor' problems
|
||||||
|
|
||||||
|
The 'connection-monitor' feature of the sws HTTP test server does not work
|
||||||
|
properly if some tests are run in unexpected order. Like 1509 and then 1525.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/868
|
||||||
|
|
||||||
|
11.5 Connection information when using TCP Fast Open
|
||||||
|
|
||||||
|
CURLINFO_LOCAL_PORT (and possibly a few other) fails when TCP Fast Open is
|
||||||
|
enabled.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/1332 and
|
||||||
|
https://github.com/curl/curl/issues/4296
|
||||||
|
|
||||||
|
12. LDAP
|
||||||
|
|
||||||
|
12.1 OpenLDAP hangs after returning results
|
||||||
|
|
||||||
|
By configuration defaults, OpenLDAP automatically chases referrals on
|
||||||
|
secondary socket descriptors. The OpenLDAP backend is asynchronous and thus
|
||||||
|
should monitor all socket descriptors involved. Currently, these secondary
|
||||||
|
descriptors are not monitored, causing the OpenLDAP library to never receive
|
||||||
|
data from them.
|
||||||
|
|
||||||
|
As a temporary workaround, disable referrals chasing by configuration.
|
||||||
|
|
||||||
|
The fix is not easy: proper automatic referrals chasing requires a
|
||||||
|
synchronous bind callback and monitoring an arbitrary number of socket
|
||||||
|
descriptors for a single easy handle (currently limited to 5).
|
||||||
|
|
||||||
|
Generic LDAP is synchronous: OK.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/622 and
|
||||||
|
https://curl.se/mail/lib-2016-01/0101.html
|
||||||
|
|
||||||
|
12.2 LDAP on Windows does authentication wrong?
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/3116
|
||||||
|
|
||||||
|
12.3 LDAP on Windows does not work
|
||||||
|
|
||||||
|
A simple curl command line getting "ldap://ldap.forumsys.com" returns an
|
||||||
|
error that says "no memory" !
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/4261
|
||||||
|
|
||||||
|
12.4 LDAPS requests to ActiveDirectory server hang
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/9580
|
||||||
|
|
||||||
|
13. TCP/IP
|
||||||
|
|
||||||
|
13.2 Trying local ports fails on Windows
|
||||||
|
|
||||||
|
This makes '--local-port [range]' not work since curl cannot properly
|
||||||
|
detect if a port is already in use, so it will try the first port, use that and
|
||||||
|
then subsequently fail anyway if that was actually in use.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/8112
|
||||||
|
|
||||||
|
15. CMake
|
||||||
|
|
||||||
|
15.1 cmake outputs: no version information available
|
||||||
|
|
||||||
|
Something in the SONAME generation seems to be wrong in the cmake build.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11158
|
||||||
|
|
||||||
|
15.2 support build with GnuTLS
|
||||||
|
|
||||||
|
15.3 unusable tool_hugehelp.c with MinGW
|
||||||
|
|
||||||
|
see https://github.com/curl/curl/issues/3125
|
||||||
|
|
||||||
|
15.6 uses -lpthread instead of Threads::Threads
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/6166
|
||||||
|
|
||||||
|
15.7 generated .pc file contains strange entries
|
||||||
|
|
||||||
|
The Libs.private field of the generated .pc file contains -lgcc -lgcc_s -lc
|
||||||
|
-lgcc -lgcc_s
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/6167
|
||||||
|
|
||||||
|
15.11 ExternalProject_Add does not set CURL_CA_PATH
|
||||||
|
|
||||||
|
CURL_CA_BUNDLE and CURL_CA_PATH are not set properly when cmake's
|
||||||
|
ExternalProject_Add is used to build curl as a dependency.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/6313
|
||||||
|
|
||||||
|
15.13 CMake build with MIT Kerberos does not work
|
||||||
|
|
||||||
|
The minimum CMake version was bumped in curl 7.71.0 (#5358). Since CMake 3.2,
|
||||||
|
try_compile started respecting the CMAKE_EXE_FLAGS. The code dealing with
|
||||||
|
MIT Kerberos detection sets a few variables to a potentially weird mix of space-
|
||||||
|
and ;-separated flags. It had to blow up at some point. All the CMake checks
|
||||||
|
that involve compilation are doomed from that point, and the configured tree
|
||||||
|
cannot be built.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/6904
|
||||||
|
|
||||||
|
16. aws-sigv4
|
||||||
|
|
||||||
|
16.1 aws-sigv4 does not sign requests with * correctly
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/7559
|
||||||
|
|
||||||
|
16.6 aws-sigv4 does not behave well with AWS VPC Lattice
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/11007
|
||||||
|
|
||||||
|
17. HTTP/2
|
||||||
|
|
||||||
|
17.1 HTTP/2 prior knowledge over proxy
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/12641
|
||||||
|
|
||||||
|
17.2 HTTP/2 frames while in the connection pool kill reuse
|
||||||
|
|
||||||
|
If the server sends HTTP/2 frames (like for example an HTTP/2 PING frame) to
|
||||||
|
curl while the connection is held in curl's connection pool, the socket will
|
||||||
|
be found readable when considered for reuse and that makes curl think it is
|
||||||
|
dead, so it is closed and a new connection gets created instead.
|
||||||
|
|
||||||
|
This is *best* fixed by adding monitoring to connections while they are kept
|
||||||
|
in the pool so that pings can be responded to appropriately.
|
||||||
|
|
||||||
|
17.3 ENHANCE_YOUR_CALM causes infinite retries
|
||||||
|
|
||||||
|
Infinite retries with 2 parallel requests on one connection receiving GOAWAY
|
||||||
|
with ENHANCE_YOUR_CALM error code.
|
||||||
|
|
||||||
|
See https://github.com/curl/curl/issues/5119
|
||||||
|
|
||||||
|
18. HTTP/3
|
||||||
|
|
||||||
|
18.1 connection migration does not work
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/7695
|
||||||
|
|
||||||
|
19. RTSP
|
||||||
|
|
||||||
|
19.1 Some methods do not support response bodies
|
||||||
|
|
||||||
|
The RTSP implementation is written to assume that a number of RTSP methods
|
||||||
|
will always get responses without bodies, even though there seems to be no
|
||||||
|
indication in the RFC that this is always the case.
|
||||||
|
|
||||||
|
https://github.com/curl/curl/issues/12414
|
258
src/dependencies/curl-8.8.0/docs/MAIL-ETIQUETTE.md
Normal file
258
src/dependencies/curl-8.8.0/docs/MAIL-ETIQUETTE.md
Normal file
|
@ -0,0 +1,258 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Mail etiquette
|
||||||
|
|
||||||
|
## About the lists
|
||||||
|
|
||||||
|
### Mailing Lists
|
||||||
|
|
||||||
|
The mailing lists we have are all listed and described on the [curl
|
||||||
|
website](https://curl.se/mail/).
|
||||||
|
|
||||||
|
Each mailing list is targeted to a specific set of users and subjects, please
|
||||||
|
use the one or the ones that suit you the most.
|
||||||
|
|
||||||
|
Each mailing list has hundreds up to thousands of readers, meaning that each
|
||||||
|
mail sent is received and read by a large number of people. People from
|
||||||
|
various cultures, regions, religions and continents.
|
||||||
|
|
||||||
|
### Netiquette
|
||||||
|
|
||||||
|
Netiquette is a common term for how to behave on the Internet. Of course, in
|
||||||
|
each particular group and subculture there are differences in what is
|
||||||
|
acceptable and what is considered good manners.
|
||||||
|
|
||||||
|
This document outlines what we in the curl project consider to be good
|
||||||
|
etiquette, and primarily this focuses on how to behave on and how to use our
|
||||||
|
mailing lists.
|
||||||
|
|
||||||
|
### Do Not Mail a Single Individual
|
||||||
|
|
||||||
|
Many people send one question to one person. One person gets many mails, and
|
||||||
|
there is only one person who can give you a reply. The question may be
|
||||||
|
something that other people would also like to ask. These other people have no
|
||||||
|
way to read the reply except to ask the one person the question themselves. The one person
|
||||||
|
consequently gets overloaded with mail.
|
||||||
|
|
||||||
|
If you really want to contact an individual and perhaps pay for his or her
|
||||||
|
services, by all means go ahead, but if it is just another curl question, take
|
||||||
|
it to a suitable list instead.
|
||||||
|
|
||||||
|
### Subscription Required
|
||||||
|
|
||||||
|
All curl mailing lists require that you are subscribed to allow a mail to go
|
||||||
|
through to all the subscribers.
|
||||||
|
|
||||||
|
If you post without being subscribed (or from a different mail address than
|
||||||
|
the one you are subscribed with), your mail is simply silently discarded. You
|
||||||
|
have to subscribe first, then post.
|
||||||
|
|
||||||
|
The reason for this unfortunate and strict subscription policy is of course to
|
||||||
|
stop spam from pestering the lists.
|
||||||
|
|
||||||
|
### Moderation of new posters
|
||||||
|
|
||||||
|
Several of the curl mailing lists automatically make all posts from new
|
||||||
|
subscribers be moderated. After you have subscribed and sent your first mail
|
||||||
|
to a list, that mail is not let through to the list until a mailing list
|
||||||
|
administrator has verified that it is OK and permits it to get posted.
|
||||||
|
|
||||||
|
Once a first post has been made that proves the sender is actually talking
|
||||||
|
about curl-related subjects, the moderation "flag" is switched off and future
|
||||||
|
posts go through without being moderated.
|
||||||
|
|
||||||
|
The reason for this moderation policy is that we do suffer from spammers who
|
||||||
|
actually subscribe and send spam to our lists.
|
||||||
|
|
||||||
|
### Handling trolls and spam
|
||||||
|
|
||||||
|
Despite our good intentions and hard work to keep spam off the lists and to
|
||||||
|
maintain a friendly and positive atmosphere, there are times when spam and/or
|
||||||
|
trolls get through.
|
||||||
|
|
||||||
|
Troll - "someone who posts inflammatory, extraneous, or off-topic messages in
|
||||||
|
an online community"
|
||||||
|
|
||||||
|
Spam - "use of electronic messaging systems to send unsolicited bulk messages"
|
||||||
|
|
||||||
|
No matter what, we NEVER EVER respond to trolls or spammers on the list. If
|
||||||
|
you believe the list admin should do something in particular, contact them
|
||||||
|
off-list. The subject is taken care of as much as possible to prevent repeated
|
||||||
|
offenses, but responding on the list to such messages never leads to anything
|
||||||
|
good and only puts the light even more on the offender: which was the entire
|
||||||
|
purpose of it getting sent to the list in the first place.
|
||||||
|
|
||||||
|
Do not feed the trolls.
|
||||||
|
|
||||||
|
### How to unsubscribe
|
||||||
|
|
||||||
|
You can unsubscribe the same way you subscribed in the first place. You go to
|
||||||
|
the page for the particular mailing list you are subscribed to and you enter
|
||||||
|
your email address and password and press the unsubscribe button.
|
||||||
|
|
||||||
|
Also, the instructions to unsubscribe are included in the headers of every
|
||||||
|
mail that is sent out to all curl related mailing lists and there is a footer
|
||||||
|
in each mail that links to the "admin" page on which you can unsubscribe and
|
||||||
|
change other options.
|
||||||
|
|
||||||
|
You NEVER EVER email the mailing list requesting someone else to take you off
|
||||||
|
the list.
|
||||||
|
|
||||||
|
### I posted, now what?
|
||||||
|
|
||||||
|
If you are not subscribed with the same email address that you used to send
|
||||||
|
the email, your post is silently discarded.
|
||||||
|
|
||||||
|
If you posted for the first time to the mailing list, you first need to wait
|
||||||
|
for an administrator to allow your email to go through (moderated). This
|
||||||
|
normally happens quickly but in case we are asleep, you may have to wait a few
|
||||||
|
hours.
|
||||||
|
|
||||||
|
Once your email goes through it is sent out to several hundred or even
|
||||||
|
thousands of recipients. Your email may cover an area that not that many
|
||||||
|
people know about or are interested in. Or possibly the person who knows about
|
||||||
|
it is on vacation or under a heavy work load right now. You may have to wait
|
||||||
|
for a response and you should not expect to get a response at all. Ideally,
|
||||||
|
you get an answer within a couple of days.
|
||||||
|
|
||||||
|
You do yourself and all of us a service when you include as many details as
|
||||||
|
possible already in your first email. Mention your operating system and
|
||||||
|
environment. Tell us which curl version you are using and tell us what you
|
||||||
|
did, what happened and what you expected would happen. Preferably, show us
|
||||||
|
what you did with details enough to allow others to help point out the problem
|
||||||
|
or repeat the steps in their locations.
|
||||||
|
|
||||||
|
Failing to include details only delays responses and makes people respond and
|
||||||
|
ask for more details and you have to send follow-up emails that include them.
|
||||||
|
|
||||||
|
Expect the responses to primarily help YOU debug the issue, or ask YOU
|
||||||
|
questions that can lead you or others towards a solution or explanation to
|
||||||
|
whatever you experience.
|
||||||
|
|
||||||
|
If you are a repeat offender to the guidelines outlined in this document,
|
||||||
|
chances are that people ignore you and your chances to get responses in the
|
||||||
|
future greatly diminish.
|
||||||
|
|
||||||
|
### Your emails are public
|
||||||
|
|
||||||
|
Your email, its contents and all its headers and the details in those headers
|
||||||
|
are received by every subscriber of the mailing list that you send your email
|
||||||
|
to.
|
||||||
|
|
||||||
|
Your email as sent to a curl mailing list ends up in mail archives, on the
|
||||||
|
curl website and elsewhere, for others to see and read. Today and in the
|
||||||
|
future. In addition to the archives, the mail is sent out to thousands of
|
||||||
|
individuals. There is no way to undo a sent email.
|
||||||
|
|
||||||
|
When sending emails to a curl mailing list, do not include sensitive
|
||||||
|
information such as usernames and passwords; use fake ones, temporary ones or
|
||||||
|
just remove them completely from the mail. Note that this includes base64
|
||||||
|
encoded HTTP Basic auth headers.
|
||||||
|
|
||||||
|
This public nature of the curl mailing lists makes automatically inserted mail
|
||||||
|
footers about mails being "private" or "only meant for the recipient" or
|
||||||
|
similar even more silly than usual. Because they are absolutely not private
|
||||||
|
when sent to a public mailing list.
|
||||||
|
|
||||||
|
## Sending mail
|
||||||
|
|
||||||
|
### Reply or New Mail
|
||||||
|
|
||||||
|
Please do not reply to an existing message as a short-cut to post a message to
|
||||||
|
the lists.
|
||||||
|
|
||||||
|
Many mail programs and web archivers use information within mails to keep them
|
||||||
|
together as "threads", as collections of posts that discuss a certain subject.
|
||||||
|
If you do not intend to reply on the same or similar subject, do not just hit
|
||||||
|
reply on an existing mail and change the subject, create a new mail.
|
||||||
|
|
||||||
|
### Reply to the List
|
||||||
|
|
||||||
|
When replying to a message from the list, make sure that you do "group reply"
|
||||||
|
or "reply to all", and not just reply to the author of the single mail you
|
||||||
|
reply to.
|
||||||
|
|
||||||
|
We are actively discouraging replying to the single person by setting the
|
||||||
|
correct field in outgoing mails back asking for replies to get sent to the
|
||||||
|
mailing list address, making it harder for people to reply to the author only
|
||||||
|
by mistake.
|
||||||
|
|
||||||
|
### Use a Sensible Subject
|
||||||
|
|
||||||
|
Please use a subject of the mail that makes sense and that is related to the
|
||||||
|
contents of your mail. It makes it a lot easier to find your mail afterwards
|
||||||
|
and it makes it easier to track mail threads and topics.
|
||||||
|
|
||||||
|
### Do Not Top-Post
|
||||||
|
|
||||||
|
If you reply to a message, do not use top-posting. Top-posting is when you
|
||||||
|
write the new text at the top of a mail and you insert the previous quoted
|
||||||
|
mail conversation below. It forces users to read the mail in a backwards order
|
||||||
|
to properly understand it.
|
||||||
|
|
||||||
|
This is why top posting is so bad (in top posting order):
|
||||||
|
|
||||||
|
A: Because it messes up the order in which people normally read text.
|
||||||
|
Q: Why is top-posting such a bad thing?
|
||||||
|
A: Top-posting.
|
||||||
|
Q: What is the most annoying thing in email?
|
||||||
|
|
||||||
|
Apart from the screwed up read order (especially when mixed together in a
|
||||||
|
thread when someone responds using the mandated bottom-posting style), it also
|
||||||
|
makes it impossible to quote only parts of the original mail.
|
||||||
|
|
||||||
|
When you reply to a mail, you let the mail client insert the previous mail
|
||||||
|
quoted. Then you put the cursor on the first line of the mail and you move
|
||||||
|
down through the mail, deleting all parts of the quotes that do not add
|
||||||
|
context for your comments. When you want to add a comment you do so, inline,
|
||||||
|
right after the quotes that relate to your comment. Then you continue
|
||||||
|
downwards again.
|
||||||
|
|
||||||
|
When most of the quotes have been removed and you have added your own words,
|
||||||
|
you are done.
|
||||||
|
|
||||||
|
### HTML is not for mails
|
||||||
|
|
||||||
|
Please switch off those HTML encoded messages. You can mail all those funny
|
||||||
|
mails to your friends. We speak plain text mails.
|
||||||
|
|
||||||
|
### Quoting
|
||||||
|
|
||||||
|
Quote as little as possible. Just enough to provide the context you cannot
|
||||||
|
leave out. A lengthy description can be found
|
||||||
|
[here](https://www.netmeister.org/news/learn2quote.html).
|
||||||
|
|
||||||
|
### Digest
|
||||||
|
|
||||||
|
We allow subscribers to subscribe to the "digest" version of the mailing
|
||||||
|
lists. A digest is a collection of mails lumped together in one single mail.
|
||||||
|
|
||||||
|
Should you decide to reply to a mail sent out as a digest, there are two
|
||||||
|
things you MUST consider if you really really cannot subscribe normally
|
||||||
|
instead:
|
||||||
|
|
||||||
|
Cut off all mails and chatter that is not related to the mail you want to
|
||||||
|
reply to.
|
||||||
|
|
||||||
|
Change the subject name to something sensible and related to the subject,
|
||||||
|
preferably even the actual subject of the single mail you wanted to reply to.
|
||||||
|
|
||||||
|
### Please Tell Us How You Solved The Problem
|
||||||
|
|
||||||
|
Many people mail questions to the list, people spend some of their time and
|
||||||
|
make an effort in providing good answers to these questions.
|
||||||
|
|
||||||
|
If you are the one who asks, please consider responding once more in case one
|
||||||
|
of the hints was what solved your problems. The guys who write answers feel
|
||||||
|
good to know that they provided a good answer and that you fixed the problem.
|
||||||
|
Far too often, the person who asked the question is never heard from again,
|
||||||
|
and we never get to know if they are gone because the problem was solved or
|
||||||
|
perhaps because the problem was unsolvable.
|
||||||
|
|
||||||
|
Getting the solution posted also helps other users that experience the same
|
||||||
|
problem(s). They get to see (possibly in the web archives) that the suggested
|
||||||
|
fixes actually have helped at least one person.
|
1006
src/dependencies/curl-8.8.0/docs/MANUAL.md
Normal file
1006
src/dependencies/curl-8.8.0/docs/MANUAL.md
Normal file
File diff suppressed because it is too large
Load diff
33
src/dependencies/curl-8.8.0/docs/MQTT.md
Normal file
33
src/dependencies/curl-8.8.0/docs/MQTT.md
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# MQTT in curl
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
A plain "GET" subscribes to the topic and prints all published messages.
|
||||||
|
Doing a "POST" publishes the post data to the topic and exits.
|
||||||
|
|
||||||
|
Example subscribe:
|
||||||
|
|
||||||
|
curl mqtt://host.home/bedroom/temp
|
||||||
|
|
||||||
|
Example publish:
|
||||||
|
|
||||||
|
curl -d 75 mqtt://host.home/bedroom/dimmer
|
||||||
|
|
||||||
|
## What does curl deliver as a response to a subscribe
|
||||||
|
|
||||||
|
It outputs a two-byte topic length (MSB | LSB), then the topic, followed by the
|
||||||
|
payload.
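To eyeball that framing (an illustrative pipeline, assuming a hex dump tool such as `xxd` is available; stop it with Ctrl-C):

    curl mqtt://host.home/bedroom/temp | xxd

The first two bytes of the dump are the topic length, then the topic string follows, then the payload.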
|
||||||
|
|
||||||
|
## Caveats
|
||||||
|
|
||||||
|
Remaining limitations:
|
||||||
|
- Only QoS level 0 is implemented for publish
|
||||||
|
- No way to set retain flag for publish
|
||||||
|
- No TLS (mqtts) support
|
||||||
|
- Naive EAGAIN handling does not handle split messages
|
125
src/dependencies/curl-8.8.0/docs/Makefile.am
Normal file
125
src/dependencies/curl-8.8.0/docs/Makefile.am
Normal file
|
@ -0,0 +1,125 @@
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
|
||||||
|
AUTOMAKE_OPTIONS = foreign no-dependencies
|
||||||
|
|
||||||
|
if BUILD_DOCS
|
||||||
|
# if we disable man page building, ignore these
|
||||||
|
MK_CA_DOCS = mk-ca-bundle.1
|
||||||
|
CURLCONF_DOCS = curl-config.1
|
||||||
|
endif
|
||||||
|
|
||||||
|
CURLPAGES = curl-config.md mk-ca-bundle.md
|
||||||
|
|
||||||
|
SUBDIRS = . cmdline-opts libcurl
|
||||||
|
DIST_SUBDIRS = $(SUBDIRS) examples
|
||||||
|
|
||||||
|
if BUILD_DOCS
|
||||||
|
CLEANFILES = mk-ca-bundle.1 curl-config.1
|
||||||
|
endif
|
||||||
|
|
||||||
|
EXTRA_DIST = \
|
||||||
|
$(CURLPAGES) \
|
||||||
|
ALTSVC.md \
|
||||||
|
BINDINGS.md \
|
||||||
|
BUFQ.md \
|
||||||
|
BUFREF.md \
|
||||||
|
BUG-BOUNTY.md \
|
||||||
|
BUGS.md \
|
||||||
|
CHECKSRC.md \
|
||||||
|
CIPHERS.md \
|
||||||
|
CMakeLists.txt \
|
||||||
|
CODE_OF_CONDUCT.md \
|
||||||
|
CODE_REVIEW.md \
|
||||||
|
CODE_STYLE.md \
|
||||||
|
CLIENT-READERS.md \
|
||||||
|
CLIENT-WRITERS.md \
|
||||||
|
CONNECTION-FILTERS.md \
|
||||||
|
CONTRIBUTE.md \
|
||||||
|
CURL-DISABLE.md \
|
||||||
|
CURLDOWN.md \
|
||||||
|
DEPRECATE.md \
|
||||||
|
DISTROS.md \
|
||||||
|
DYNBUF.md \
|
||||||
|
EARLY-RELEASE.md \
|
||||||
|
ECH.md \
|
||||||
|
EXPERIMENTAL.md \
|
||||||
|
FAQ \
|
||||||
|
FEATURES.md \
|
||||||
|
GOVERNANCE.md \
|
||||||
|
HELP-US.md \
|
||||||
|
HISTORY.md \
|
||||||
|
HSTS.md \
|
||||||
|
HTTP-COOKIES.md \
|
||||||
|
HTTP2.md \
|
||||||
|
HTTP3.md \
|
||||||
|
HYPER.md \
|
||||||
|
INSTALL \
|
||||||
|
INSTALL-CMAKE.md \
|
||||||
|
INSTALL.md \
|
||||||
|
INTERNALS.md \
|
||||||
|
IPFS.md \
|
||||||
|
KNOWN_BUGS \
|
||||||
|
MAIL-ETIQUETTE.md \
|
||||||
|
MANUAL.md \
|
||||||
|
MQTT.md \
|
||||||
|
NEW-PROTOCOL.md \
|
||||||
|
options-in-versions \
|
||||||
|
PARALLEL-TRANSFERS.md \
|
||||||
|
README.md \
|
||||||
|
RELEASE-PROCEDURE.md \
|
||||||
|
RUSTLS.md \
|
||||||
|
ROADMAP.md \
|
||||||
|
SECURITY-ADVISORY.md \
|
||||||
|
SPONSORS.md \
|
||||||
|
SSL-PROBLEMS.md \
|
||||||
|
SSLCERTS.md \
|
||||||
|
THANKS \
|
||||||
|
TODO \
|
||||||
|
TheArtOfHttpScripting.md \
|
||||||
|
URL-SYNTAX.md \
|
||||||
|
VERSIONS.md \
|
||||||
|
VULN-DISCLOSURE-POLICY.md \
|
||||||
|
WEBSOCKET.md
|
||||||
|
|
||||||
|
CD2NROFF = $(top_srcdir)/scripts/cd2nroff $< >$@
|
||||||
|
|
||||||
|
CD2 = $(CD2_$(V))
|
||||||
|
CD2_0 = @echo " RENDER " $@;
|
||||||
|
CD2_1 =
|
||||||
|
CD2_ = $(CD2_0)
|
||||||
|
|
||||||
|
SUFFIXES = .1 .md
|
||||||
|
|
||||||
|
all: $(MK_CA_DOCS) $(CURLCONF_DOCS)
|
||||||
|
|
||||||
|
.md.1:
|
||||||
|
$(CD2)$(CD2NROFF)
|
||||||
|
|
||||||
|
curl-config.1: curl-config.md
|
||||||
|
|
||||||
|
mk-ca-bundle.1: mk-ca-bundle.md
|
||||||
|
|
||||||
|
distclean:
|
||||||
|
rm -f $(CLEANFILES)
|
861
src/dependencies/curl-8.8.0/docs/Makefile.in
Normal file
|
@ -0,0 +1,861 @@
|
||||||
|
# Makefile.in generated by automake 1.16.5 from Makefile.am.
|
||||||
|
# @configure_input@
|
||||||
|
|
||||||
|
# Copyright (C) 1994-2021 Free Software Foundation, Inc.
|
||||||
|
|
||||||
|
# This Makefile.in is free software; the Free Software Foundation
|
||||||
|
# gives unlimited permission to copy and/or distribute it,
|
||||||
|
# with or without modifications, as long as this notice is preserved.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
|
||||||
|
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||||
|
# PARTICULAR PURPOSE.
|
||||||
|
|
||||||
|
@SET_MAKE@
|
||||||
|
|
||||||
|
#***************************************************************************
|
||||||
|
# _ _ ____ _
|
||||||
|
# Project ___| | | | _ \| |
|
||||||
|
# / __| | | | |_) | |
|
||||||
|
# | (__| |_| | _ <| |___
|
||||||
|
# \___|\___/|_| \_\_____|
|
||||||
|
#
|
||||||
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
#
|
||||||
|
# This software is licensed as described in the file COPYING, which
|
||||||
|
# you should have received as part of this distribution. The terms
|
||||||
|
# are also available at https://curl.se/docs/copyright.html.
|
||||||
|
#
|
||||||
|
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
|
||||||
|
# copies of the Software, and permit persons to whom the Software is
|
||||||
|
# furnished to do so, under the terms of the COPYING file.
|
||||||
|
#
|
||||||
|
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
|
||||||
|
# KIND, either express or implied.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: curl
|
||||||
|
#
|
||||||
|
###########################################################################
|
||||||
|
VPATH = @srcdir@
|
||||||
|
am__is_gnu_make = { \
|
||||||
|
if test -z '$(MAKELEVEL)'; then \
|
||||||
|
false; \
|
||||||
|
elif test -n '$(MAKE_HOST)'; then \
|
||||||
|
true; \
|
||||||
|
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
|
||||||
|
true; \
|
||||||
|
else \
|
||||||
|
false; \
|
||||||
|
fi; \
|
||||||
|
}
|
||||||
|
am__make_running_with_option = \
|
||||||
|
case $${target_option-} in \
|
||||||
|
?) ;; \
|
||||||
|
*) echo "am__make_running_with_option: internal error: invalid" \
|
||||||
|
"target option '$${target_option-}' specified" >&2; \
|
||||||
|
exit 1;; \
|
||||||
|
esac; \
|
||||||
|
has_opt=no; \
|
||||||
|
sane_makeflags=$$MAKEFLAGS; \
|
||||||
|
if $(am__is_gnu_make); then \
|
||||||
|
sane_makeflags=$$MFLAGS; \
|
||||||
|
else \
|
||||||
|
case $$MAKEFLAGS in \
|
||||||
|
*\\[\ \ ]*) \
|
||||||
|
bs=\\; \
|
||||||
|
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
|
||||||
|
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
|
||||||
|
esac; \
|
||||||
|
fi; \
|
||||||
|
skip_next=no; \
|
||||||
|
strip_trailopt () \
|
||||||
|
{ \
|
||||||
|
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
|
||||||
|
}; \
|
||||||
|
for flg in $$sane_makeflags; do \
|
||||||
|
test $$skip_next = yes && { skip_next=no; continue; }; \
|
||||||
|
case $$flg in \
|
||||||
|
*=*|--*) continue;; \
|
||||||
|
-*I) strip_trailopt 'I'; skip_next=yes;; \
|
||||||
|
-*I?*) strip_trailopt 'I';; \
|
||||||
|
-*O) strip_trailopt 'O'; skip_next=yes;; \
|
||||||
|
-*O?*) strip_trailopt 'O';; \
|
||||||
|
-*l) strip_trailopt 'l'; skip_next=yes;; \
|
||||||
|
-*l?*) strip_trailopt 'l';; \
|
||||||
|
-[dEDm]) skip_next=yes;; \
|
||||||
|
-[JT]) skip_next=yes;; \
|
||||||
|
esac; \
|
||||||
|
case $$flg in \
|
||||||
|
*$$target_option*) has_opt=yes; break;; \
|
||||||
|
esac; \
|
||||||
|
done; \
|
||||||
|
test $$has_opt = yes
|
||||||
|
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
|
||||||
|
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
|
||||||
|
pkgdatadir = $(datadir)/@PACKAGE@
|
||||||
|
pkgincludedir = $(includedir)/@PACKAGE@
|
||||||
|
pkglibdir = $(libdir)/@PACKAGE@
|
||||||
|
pkglibexecdir = $(libexecdir)/@PACKAGE@
|
||||||
|
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
|
||||||
|
install_sh_DATA = $(install_sh) -c -m 644
|
||||||
|
install_sh_PROGRAM = $(install_sh) -c
|
||||||
|
install_sh_SCRIPT = $(install_sh) -c
|
||||||
|
INSTALL_HEADER = $(INSTALL_DATA)
|
||||||
|
transform = $(program_transform_name)
|
||||||
|
NORMAL_INSTALL = :
|
||||||
|
PRE_INSTALL = :
|
||||||
|
POST_INSTALL = :
|
||||||
|
NORMAL_UNINSTALL = :
|
||||||
|
PRE_UNINSTALL = :
|
||||||
|
POST_UNINSTALL = :
|
||||||
|
build_triplet = @build@
|
||||||
|
host_triplet = @host@
|
||||||
|
subdir = docs
|
||||||
|
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
|
||||||
|
am__aclocal_m4_deps = $(top_srcdir)/m4/curl-amissl.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-bearssl.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-compilers.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-confopts.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-functions.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-gnutls.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-mbedtls.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-openssl.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-override.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-reentrant.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-rustls.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-schannel.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-sectransp.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-sysconfig.m4 \
|
||||||
|
$(top_srcdir)/m4/curl-wolfssl.m4 $(top_srcdir)/m4/libtool.m4 \
|
||||||
|
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
|
||||||
|
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
|
||||||
|
$(top_srcdir)/m4/xc-am-iface.m4 \
|
||||||
|
$(top_srcdir)/m4/xc-cc-check.m4 \
|
||||||
|
$(top_srcdir)/m4/xc-lt-iface.m4 \
|
||||||
|
$(top_srcdir)/m4/xc-translit.m4 \
|
||||||
|
$(top_srcdir)/m4/xc-val-flgs.m4 \
|
||||||
|
$(top_srcdir)/m4/zz40-xc-ovr.m4 \
|
||||||
|
$(top_srcdir)/m4/zz50-xc-ovr.m4 \
|
||||||
|
$(top_srcdir)/m4/zz60-xc-ovr.m4 $(top_srcdir)/acinclude.m4 \
|
||||||
|
$(top_srcdir)/configure.ac
|
||||||
|
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
|
||||||
|
$(ACLOCAL_M4)
|
||||||
|
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
|
||||||
|
mkinstalldirs = $(install_sh) -d
|
||||||
|
CONFIG_HEADER = $(top_builddir)/lib/curl_config.h
|
||||||
|
CONFIG_CLEAN_FILES =
|
||||||
|
CONFIG_CLEAN_VPATH_FILES =
|
||||||
|
AM_V_P = $(am__v_P_@AM_V@)
|
||||||
|
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
|
||||||
|
am__v_P_0 = false
|
||||||
|
am__v_P_1 = :
|
||||||
|
AM_V_GEN = $(am__v_GEN_@AM_V@)
|
||||||
|
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
|
||||||
|
am__v_GEN_0 = @echo " GEN " $@;
|
||||||
|
am__v_GEN_1 =
|
||||||
|
AM_V_at = $(am__v_at_@AM_V@)
|
||||||
|
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
|
||||||
|
am__v_at_0 = @
|
||||||
|
am__v_at_1 =
|
||||||
|
depcomp =
|
||||||
|
am__maybe_remake_depfiles =
|
||||||
|
SOURCES =
|
||||||
|
DIST_SOURCES =
|
||||||
|
RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \
|
||||||
|
ctags-recursive dvi-recursive html-recursive info-recursive \
|
||||||
|
install-data-recursive install-dvi-recursive \
|
||||||
|
install-exec-recursive install-html-recursive \
|
||||||
|
install-info-recursive install-pdf-recursive \
|
||||||
|
install-ps-recursive install-recursive installcheck-recursive \
|
||||||
|
installdirs-recursive pdf-recursive ps-recursive \
|
||||||
|
tags-recursive uninstall-recursive
|
||||||
|
am__can_run_installinfo = \
|
||||||
|
case $$AM_UPDATE_INFO_DIR in \
|
||||||
|
n|no|NO) false;; \
|
||||||
|
*) (install-info --version) >/dev/null 2>&1;; \
|
||||||
|
esac
|
||||||
|
RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \
|
||||||
|
distclean-recursive maintainer-clean-recursive
|
||||||
|
am__recursive_targets = \
|
||||||
|
$(RECURSIVE_TARGETS) \
|
||||||
|
$(RECURSIVE_CLEAN_TARGETS) \
|
||||||
|
$(am__extra_recursive_targets)
|
||||||
|
AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \
|
||||||
|
distdir distdir-am
|
||||||
|
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
|
||||||
|
# Read a list of newline-separated strings from the standard input,
|
||||||
|
# and print each of them once, without duplicates. Input order is
|
||||||
|
# *not* preserved.
|
||||||
|
am__uniquify_input = $(AWK) '\
|
||||||
|
BEGIN { nonempty = 0; } \
|
||||||
|
{ items[$$0] = 1; nonempty = 1; } \
|
||||||
|
END { if (nonempty) { for (i in items) print i; }; } \
|
||||||
|
'
|
||||||
|
# Make sure the list of sources is unique. This is necessary because,
|
||||||
|
# e.g., the same source file might be shared among _SOURCES variables
|
||||||
|
# for different programs/libraries.
|
||||||
|
am__define_uniq_tagged_files = \
|
||||||
|
list='$(am__tagged_files)'; \
|
||||||
|
unique=`for i in $$list; do \
|
||||||
|
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
|
||||||
|
done | $(am__uniquify_input)`
|
||||||
|
am__DIST_COMMON = $(srcdir)/Makefile.in INSTALL README.md THANKS TODO
|
||||||
|
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
|
||||||
|
am__relativize = \
|
||||||
|
dir0=`pwd`; \
|
||||||
|
sed_first='s,^\([^/]*\)/.*$$,\1,'; \
|
||||||
|
sed_rest='s,^[^/]*/*,,'; \
|
||||||
|
sed_last='s,^.*/\([^/]*\)$$,\1,'; \
|
||||||
|
sed_butlast='s,/*[^/]*$$,,'; \
|
||||||
|
while test -n "$$dir1"; do \
|
||||||
|
first=`echo "$$dir1" | sed -e "$$sed_first"`; \
|
||||||
|
if test "$$first" != "."; then \
|
||||||
|
if test "$$first" = ".."; then \
|
||||||
|
dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \
|
||||||
|
dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \
|
||||||
|
else \
|
||||||
|
first2=`echo "$$dir2" | sed -e "$$sed_first"`; \
|
||||||
|
if test "$$first2" = "$$first"; then \
|
||||||
|
dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \
|
||||||
|
else \
|
||||||
|
dir2="../$$dir2"; \
|
||||||
|
fi; \
|
||||||
|
dir0="$$dir0"/"$$first"; \
|
||||||
|
fi; \
|
||||||
|
fi; \
|
||||||
|
dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \
|
||||||
|
done; \
|
||||||
|
reldir="$$dir2"
|
||||||
|
ACLOCAL = @ACLOCAL@
|
||||||
|
AMTAR = @AMTAR@
|
||||||
|
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
|
||||||
|
APACHECTL = @APACHECTL@
|
||||||
|
APXS = @APXS@
|
||||||
|
AR = @AR@
|
||||||
|
AR_FLAGS = @AR_FLAGS@
|
||||||
|
AS = @AS@
|
||||||
|
AUTOCONF = @AUTOCONF@
|
||||||
|
AUTOHEADER = @AUTOHEADER@
|
||||||
|
AUTOMAKE = @AUTOMAKE@
|
||||||
|
AWK = @AWK@
|
||||||
|
BLANK_AT_MAKETIME = @BLANK_AT_MAKETIME@
|
||||||
|
CADDY = @CADDY@
|
||||||
|
CC = @CC@
|
||||||
|
CCDEPMODE = @CCDEPMODE@
|
||||||
|
CFLAGS = @CFLAGS@
|
||||||
|
CFLAG_CURL_SYMBOL_HIDING = @CFLAG_CURL_SYMBOL_HIDING@
|
||||||
|
CONFIGURE_OPTIONS = @CONFIGURE_OPTIONS@
|
||||||
|
CPP = @CPP@
|
||||||
|
CPPFLAGS = @CPPFLAGS@
|
||||||
|
CPPFLAG_CURL_STATICLIB = @CPPFLAG_CURL_STATICLIB@
|
||||||
|
CSCOPE = @CSCOPE@
|
||||||
|
CTAGS = @CTAGS@
|
||||||
|
CURLVERSION = @CURLVERSION@
|
||||||
|
CURL_CA_BUNDLE = @CURL_CA_BUNDLE@
|
||||||
|
CURL_CFLAG_EXTRAS = @CURL_CFLAG_EXTRAS@
|
||||||
|
CURL_DISABLE_DICT = @CURL_DISABLE_DICT@
|
||||||
|
CURL_DISABLE_FILE = @CURL_DISABLE_FILE@
|
||||||
|
CURL_DISABLE_FTP = @CURL_DISABLE_FTP@
|
||||||
|
CURL_DISABLE_GOPHER = @CURL_DISABLE_GOPHER@
|
||||||
|
CURL_DISABLE_HTTP = @CURL_DISABLE_HTTP@
|
||||||
|
CURL_DISABLE_IMAP = @CURL_DISABLE_IMAP@
|
||||||
|
CURL_DISABLE_LDAP = @CURL_DISABLE_LDAP@
|
||||||
|
CURL_DISABLE_LDAPS = @CURL_DISABLE_LDAPS@
|
||||||
|
CURL_DISABLE_MQTT = @CURL_DISABLE_MQTT@
|
||||||
|
CURL_DISABLE_POP3 = @CURL_DISABLE_POP3@
|
||||||
|
CURL_DISABLE_PROXY = @CURL_DISABLE_PROXY@
|
||||||
|
CURL_DISABLE_RTSP = @CURL_DISABLE_RTSP@
|
||||||
|
CURL_DISABLE_SMB = @CURL_DISABLE_SMB@
|
||||||
|
CURL_DISABLE_SMTP = @CURL_DISABLE_SMTP@
|
||||||
|
CURL_DISABLE_TELNET = @CURL_DISABLE_TELNET@
|
||||||
|
CURL_DISABLE_TFTP = @CURL_DISABLE_TFTP@
|
||||||
|
CURL_LT_SHLIB_VERSIONED_FLAVOUR = @CURL_LT_SHLIB_VERSIONED_FLAVOUR@
|
||||||
|
CURL_NETWORK_AND_TIME_LIBS = @CURL_NETWORK_AND_TIME_LIBS@
|
||||||
|
CURL_NETWORK_LIBS = @CURL_NETWORK_LIBS@
|
||||||
|
CURL_WITH_MULTI_SSL = @CURL_WITH_MULTI_SSL@
|
||||||
|
CYGPATH_W = @CYGPATH_W@
|
||||||
|
DEFAULT_SSL_BACKEND = @DEFAULT_SSL_BACKEND@
|
||||||
|
DEFS = @DEFS@
|
||||||
|
DEPDIR = @DEPDIR@
|
||||||
|
DLLTOOL = @DLLTOOL@
|
||||||
|
DSYMUTIL = @DSYMUTIL@
|
||||||
|
DUMPBIN = @DUMPBIN@
|
||||||
|
ECHO_C = @ECHO_C@
|
||||||
|
ECHO_N = @ECHO_N@
|
||||||
|
ECHO_T = @ECHO_T@
|
||||||
|
EGREP = @EGREP@
|
||||||
|
ENABLE_SHARED = @ENABLE_SHARED@
|
||||||
|
ENABLE_STATIC = @ENABLE_STATIC@
|
||||||
|
ETAGS = @ETAGS@
|
||||||
|
EXEEXT = @EXEEXT@
|
||||||
|
FGREP = @FGREP@
|
||||||
|
FILECMD = @FILECMD@
|
||||||
|
FISH_FUNCTIONS_DIR = @FISH_FUNCTIONS_DIR@
|
||||||
|
GCOV = @GCOV@
|
||||||
|
GREP = @GREP@
|
||||||
|
HAVE_BROTLI = @HAVE_BROTLI@
|
||||||
|
HAVE_GNUTLS_SRP = @HAVE_GNUTLS_SRP@
|
||||||
|
HAVE_LDAP_SSL = @HAVE_LDAP_SSL@
|
||||||
|
HAVE_LIBZ = @HAVE_LIBZ@
|
||||||
|
HAVE_OPENSSL_QUIC = @HAVE_OPENSSL_QUIC@
|
||||||
|
HAVE_OPENSSL_SRP = @HAVE_OPENSSL_SRP@
|
||||||
|
HAVE_PROTO_BSDSOCKET_H = @HAVE_PROTO_BSDSOCKET_H@
|
||||||
|
HAVE_ZSTD = @HAVE_ZSTD@
|
||||||
|
HTTPD = @HTTPD@
|
||||||
|
HTTPD_NGHTTPX = @HTTPD_NGHTTPX@
|
||||||
|
IDN_ENABLED = @IDN_ENABLED@
|
||||||
|
INSTALL = @INSTALL@
|
||||||
|
INSTALL_DATA = @INSTALL_DATA@
|
||||||
|
INSTALL_PROGRAM = @INSTALL_PROGRAM@
|
||||||
|
INSTALL_SCRIPT = @INSTALL_SCRIPT@
|
||||||
|
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
|
||||||
|
IPV6_ENABLED = @IPV6_ENABLED@
|
||||||
|
LCOV = @LCOV@
|
||||||
|
LD = @LD@
|
||||||
|
LDFLAGS = @LDFLAGS@
|
||||||
|
LIBCURL_LIBS = @LIBCURL_LIBS@
|
||||||
|
LIBCURL_NO_SHARED = @LIBCURL_NO_SHARED@
|
||||||
|
LIBOBJS = @LIBOBJS@
|
||||||
|
LIBS = @LIBS@
|
||||||
|
LIBTOOL = @LIBTOOL@
|
||||||
|
LIPO = @LIPO@
|
||||||
|
LN_S = @LN_S@
|
||||||
|
LTLIBOBJS = @LTLIBOBJS@
|
||||||
|
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
|
||||||
|
MAINT = @MAINT@
|
||||||
|
MAKEINFO = @MAKEINFO@
|
||||||
|
MANIFEST_TOOL = @MANIFEST_TOOL@
|
||||||
|
MKDIR_P = @MKDIR_P@
|
||||||
|
NM = @NM@
|
||||||
|
NMEDIT = @NMEDIT@
|
||||||
|
OBJDUMP = @OBJDUMP@
|
||||||
|
OBJEXT = @OBJEXT@
|
||||||
|
OTOOL = @OTOOL@
|
||||||
|
OTOOL64 = @OTOOL64@
|
||||||
|
PACKAGE = @PACKAGE@
|
||||||
|
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
|
||||||
|
PACKAGE_NAME = @PACKAGE_NAME@
|
||||||
|
PACKAGE_STRING = @PACKAGE_STRING@
|
||||||
|
PACKAGE_TARNAME = @PACKAGE_TARNAME@
|
||||||
|
PACKAGE_URL = @PACKAGE_URL@
|
||||||
|
PACKAGE_VERSION = @PACKAGE_VERSION@
|
||||||
|
PATH_SEPARATOR = @PATH_SEPARATOR@
|
||||||
|
PERL = @PERL@
|
||||||
|
PKGADD_NAME = @PKGADD_NAME@
|
||||||
|
PKGADD_PKG = @PKGADD_PKG@
|
||||||
|
PKGADD_VENDOR = @PKGADD_VENDOR@
|
||||||
|
PKGCONFIG = @PKGCONFIG@
|
||||||
|
RANDOM_FILE = @RANDOM_FILE@
|
||||||
|
RANLIB = @RANLIB@
|
||||||
|
RC = @RC@
|
||||||
|
REQUIRE_LIB_DEPS = @REQUIRE_LIB_DEPS@
|
||||||
|
SED = @SED@
|
||||||
|
SET_MAKE = @SET_MAKE@
|
||||||
|
SHELL = @SHELL@
|
||||||
|
SSL_BACKENDS = @SSL_BACKENDS@
|
||||||
|
SSL_ENABLED = @SSL_ENABLED@
|
||||||
|
SSL_LIBS = @SSL_LIBS@
|
||||||
|
STRIP = @STRIP@
|
||||||
|
SUPPORT_FEATURES = @SUPPORT_FEATURES@
|
||||||
|
SUPPORT_PROTOCOLS = @SUPPORT_PROTOCOLS@
|
||||||
|
TEST_NGHTTPX = @TEST_NGHTTPX@
|
||||||
|
USE_ARES = @USE_ARES@
|
||||||
|
USE_BEARSSL = @USE_BEARSSL@
|
||||||
|
USE_GNUTLS = @USE_GNUTLS@
|
||||||
|
USE_HYPER = @USE_HYPER@
|
||||||
|
USE_LIBPSL = @USE_LIBPSL@
|
||||||
|
USE_LIBRTMP = @USE_LIBRTMP@
|
||||||
|
USE_LIBSSH = @USE_LIBSSH@
|
||||||
|
USE_LIBSSH2 = @USE_LIBSSH2@
|
||||||
|
USE_MBEDTLS = @USE_MBEDTLS@
|
||||||
|
USE_MSH3 = @USE_MSH3@
|
||||||
|
USE_NGHTTP2 = @USE_NGHTTP2@
|
||||||
|
USE_NGHTTP3 = @USE_NGHTTP3@
|
||||||
|
USE_NGTCP2 = @USE_NGTCP2@
|
||||||
|
USE_NGTCP2_CRYPTO_BORINGSSL = @USE_NGTCP2_CRYPTO_BORINGSSL@
|
||||||
|
USE_NGTCP2_CRYPTO_GNUTLS = @USE_NGTCP2_CRYPTO_GNUTLS@
|
||||||
|
USE_NGTCP2_CRYPTO_QUICTLS = @USE_NGTCP2_CRYPTO_QUICTLS@
|
||||||
|
USE_NGTCP2_CRYPTO_WOLFSSL = @USE_NGTCP2_CRYPTO_WOLFSSL@
|
||||||
|
USE_NGTCP2_H3 = @USE_NGTCP2_H3@
|
||||||
|
USE_OPENLDAP = @USE_OPENLDAP@
|
||||||
|
USE_OPENSSL_H3 = @USE_OPENSSL_H3@
|
||||||
|
USE_OPENSSL_QUIC = @USE_OPENSSL_QUIC@
|
||||||
|
USE_QUICHE = @USE_QUICHE@
|
||||||
|
USE_RUSTLS = @USE_RUSTLS@
|
||||||
|
USE_SCHANNEL = @USE_SCHANNEL@
|
||||||
|
USE_SECTRANSP = @USE_SECTRANSP@
|
||||||
|
USE_UNIX_SOCKETS = @USE_UNIX_SOCKETS@
|
||||||
|
USE_WIN32_CRYPTO = @USE_WIN32_CRYPTO@
|
||||||
|
USE_WIN32_LARGE_FILES = @USE_WIN32_LARGE_FILES@
|
||||||
|
USE_WIN32_SMALL_FILES = @USE_WIN32_SMALL_FILES@
|
||||||
|
USE_WINDOWS_SSPI = @USE_WINDOWS_SSPI@
|
||||||
|
USE_WOLFSSH = @USE_WOLFSSH@
|
||||||
|
USE_WOLFSSL = @USE_WOLFSSL@
|
||||||
|
VERSION = @VERSION@
|
||||||
|
VERSIONNUM = @VERSIONNUM@
|
||||||
|
VSFTPD = @VSFTPD@
|
||||||
|
ZLIB_LIBS = @ZLIB_LIBS@
|
||||||
|
ZSH_FUNCTIONS_DIR = @ZSH_FUNCTIONS_DIR@
|
||||||
|
abs_builddir = @abs_builddir@
|
||||||
|
abs_srcdir = @abs_srcdir@
|
||||||
|
abs_top_builddir = @abs_top_builddir@
|
||||||
|
abs_top_srcdir = @abs_top_srcdir@
|
||||||
|
ac_ct_AR = @ac_ct_AR@
|
||||||
|
ac_ct_CC = @ac_ct_CC@
|
||||||
|
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
|
||||||
|
am__include = @am__include@
|
||||||
|
am__leading_dot = @am__leading_dot@
|
||||||
|
am__quote = @am__quote@
|
||||||
|
am__tar = @am__tar@
|
||||||
|
am__untar = @am__untar@
|
||||||
|
bindir = @bindir@
|
||||||
|
build = @build@
|
||||||
|
build_alias = @build_alias@
|
||||||
|
build_cpu = @build_cpu@
|
||||||
|
build_os = @build_os@
|
||||||
|
build_vendor = @build_vendor@
|
||||||
|
builddir = @builddir@
|
||||||
|
datadir = @datadir@
|
||||||
|
datarootdir = @datarootdir@
|
||||||
|
docdir = @docdir@
|
||||||
|
dvidir = @dvidir@
|
||||||
|
exec_prefix = @exec_prefix@
|
||||||
|
host = @host@
|
||||||
|
host_alias = @host_alias@
|
||||||
|
host_cpu = @host_cpu@
|
||||||
|
host_os = @host_os@
|
||||||
|
host_vendor = @host_vendor@
|
||||||
|
htmldir = @htmldir@
|
||||||
|
includedir = @includedir@
|
||||||
|
infodir = @infodir@
|
||||||
|
install_sh = @install_sh@
|
||||||
|
libdir = @libdir@
|
||||||
|
libexecdir = @libexecdir@
|
||||||
|
libext = @libext@
|
||||||
|
localedir = @localedir@
|
||||||
|
localstatedir = @localstatedir@
|
||||||
|
mandir = @mandir@
|
||||||
|
mkdir_p = @mkdir_p@
|
||||||
|
oldincludedir = @oldincludedir@
|
||||||
|
pdfdir = @pdfdir@
|
||||||
|
prefix = @prefix@
|
||||||
|
program_transform_name = @program_transform_name@
|
||||||
|
psdir = @psdir@
|
||||||
|
runstatedir = @runstatedir@
|
||||||
|
sbindir = @sbindir@
|
||||||
|
sharedstatedir = @sharedstatedir@
|
||||||
|
srcdir = @srcdir@
|
||||||
|
sysconfdir = @sysconfdir@
|
||||||
|
target_alias = @target_alias@
|
||||||
|
top_build_prefix = @top_build_prefix@
|
||||||
|
top_builddir = @top_builddir@
|
||||||
|
top_srcdir = @top_srcdir@
|
||||||
|
AUTOMAKE_OPTIONS = foreign no-dependencies
|
||||||
|
|
||||||
|
# if we disable man page building, ignore these
|
||||||
|
@BUILD_DOCS_TRUE@MK_CA_DOCS = mk-ca-bundle.1
|
||||||
|
@BUILD_DOCS_TRUE@CURLCONF_DOCS = curl-config.1
|
||||||
|
CURLPAGES = curl-config.md mk-ca-bundle.md
|
||||||
|
SUBDIRS = . cmdline-opts libcurl
|
||||||
|
DIST_SUBDIRS = $(SUBDIRS) examples
|
||||||
|
@BUILD_DOCS_TRUE@CLEANFILES = mk-ca-bundle.1 curl-config.1
|
||||||
|
EXTRA_DIST = \
|
||||||
|
$(CURLPAGES) \
|
||||||
|
ALTSVC.md \
|
||||||
|
BINDINGS.md \
|
||||||
|
BUFQ.md \
|
||||||
|
BUFREF.md \
|
||||||
|
BUG-BOUNTY.md \
|
||||||
|
BUGS.md \
|
||||||
|
CHECKSRC.md \
|
||||||
|
CIPHERS.md \
|
||||||
|
CMakeLists.txt \
|
||||||
|
CODE_OF_CONDUCT.md \
|
||||||
|
CODE_REVIEW.md \
|
||||||
|
CODE_STYLE.md \
|
||||||
|
CLIENT-READERS.md \
|
||||||
|
CLIENT-WRITERS.md \
|
||||||
|
CONNECTION-FILTERS.md \
|
||||||
|
CONTRIBUTE.md \
|
||||||
|
CURL-DISABLE.md \
|
||||||
|
CURLDOWN.md \
|
||||||
|
DEPRECATE.md \
|
||||||
|
DISTROS.md \
|
||||||
|
DYNBUF.md \
|
||||||
|
EARLY-RELEASE.md \
|
||||||
|
ECH.md \
|
||||||
|
EXPERIMENTAL.md \
|
||||||
|
FAQ \
|
||||||
|
FEATURES.md \
|
||||||
|
GOVERNANCE.md \
|
||||||
|
HELP-US.md \
|
||||||
|
HISTORY.md \
|
||||||
|
HSTS.md \
|
||||||
|
HTTP-COOKIES.md \
|
||||||
|
HTTP2.md \
|
||||||
|
HTTP3.md \
|
||||||
|
HYPER.md \
|
||||||
|
INSTALL \
|
||||||
|
INSTALL-CMAKE.md \
|
||||||
|
INSTALL.md \
|
||||||
|
INTERNALS.md \
|
||||||
|
IPFS.md \
|
||||||
|
KNOWN_BUGS \
|
||||||
|
MAIL-ETIQUETTE.md \
|
||||||
|
MANUAL.md \
|
||||||
|
MQTT.md \
|
||||||
|
NEW-PROTOCOL.md \
|
||||||
|
options-in-versions \
|
||||||
|
PARALLEL-TRANSFERS.md \
|
||||||
|
README.md \
|
||||||
|
RELEASE-PROCEDURE.md \
|
||||||
|
RUSTLS.md \
|
||||||
|
ROADMAP.md \
|
||||||
|
SECURITY-ADVISORY.md \
|
||||||
|
SPONSORS.md \
|
||||||
|
SSL-PROBLEMS.md \
|
||||||
|
SSLCERTS.md \
|
||||||
|
THANKS \
|
||||||
|
TODO \
|
||||||
|
TheArtOfHttpScripting.md \
|
||||||
|
URL-SYNTAX.md \
|
||||||
|
VERSIONS.md \
|
||||||
|
VULN-DISCLOSURE-POLICY.md \
|
||||||
|
WEBSOCKET.md
|
||||||
|
|
||||||
|
CD2NROFF = $(top_srcdir)/scripts/cd2nroff $< >$@
|
||||||
|
CD2 = $(CD2_$(V))
|
||||||
|
CD2_0 = @echo " RENDER " $@;
|
||||||
|
CD2_1 =
|
||||||
|
CD2_ = $(CD2_0)
|
||||||
|
SUFFIXES = .1 .md
|
||||||
|
all: all-recursive
|
||||||
|
|
||||||
|
.SUFFIXES:
|
||||||
|
.SUFFIXES: .1 .md
|
||||||
|
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps)
|
||||||
|
@for dep in $?; do \
|
||||||
|
case '$(am__configure_deps)' in \
|
||||||
|
*$$dep*) \
|
||||||
|
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
|
||||||
|
&& { if test -f $@; then exit 0; else break; fi; }; \
|
||||||
|
exit 1;; \
|
||||||
|
esac; \
|
||||||
|
done; \
|
||||||
|
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign docs/Makefile'; \
|
||||||
|
$(am__cd) $(top_srcdir) && \
|
||||||
|
$(AUTOMAKE) --foreign docs/Makefile
|
||||||
|
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
|
||||||
|
@case '$?' in \
|
||||||
|
*config.status*) \
|
||||||
|
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
|
||||||
|
*) \
|
||||||
|
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
|
||||||
|
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
|
||||||
|
esac;
|
||||||
|
|
||||||
|
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
|
||||||
|
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
|
||||||
|
|
||||||
|
$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
|
||||||
|
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
|
||||||
|
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
|
||||||
|
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
|
||||||
|
$(am__aclocal_m4_deps):
|
||||||
|
|
||||||
|
mostlyclean-libtool:
|
||||||
|
-rm -f *.lo
|
||||||
|
|
||||||
|
clean-libtool:
|
||||||
|
-rm -rf .libs _libs
|
||||||
|
|
||||||
|
# This directory's subdirectories are mostly independent; you can cd
|
||||||
|
# into them and run 'make' without going through this Makefile.
|
||||||
|
# To change the values of 'make' variables: instead of editing Makefiles,
|
||||||
|
# (1) if the variable is set in 'config.status', edit 'config.status'
|
||||||
|
# (which will cause the Makefiles to be regenerated when you run 'make');
|
||||||
|
# (2) otherwise, pass the desired values on the 'make' command line.
|
||||||
|
$(am__recursive_targets):
|
||||||
|
@fail=; \
|
||||||
|
if $(am__make_keepgoing); then \
|
||||||
|
failcom='fail=yes'; \
|
||||||
|
else \
|
||||||
|
failcom='exit 1'; \
|
||||||
|
fi; \
|
||||||
|
dot_seen=no; \
|
||||||
|
target=`echo $@ | sed s/-recursive//`; \
|
||||||
|
case "$@" in \
|
||||||
|
distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
|
||||||
|
*) list='$(SUBDIRS)' ;; \
|
||||||
|
esac; \
|
||||||
|
for subdir in $$list; do \
|
||||||
|
echo "Making $$target in $$subdir"; \
|
||||||
|
if test "$$subdir" = "."; then \
|
||||||
|
dot_seen=yes; \
|
||||||
|
local_target="$$target-am"; \
|
||||||
|
else \
|
||||||
|
local_target="$$target"; \
|
||||||
|
fi; \
|
||||||
|
($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
|
||||||
|
|| eval $$failcom; \
|
||||||
|
done; \
|
||||||
|
if test "$$dot_seen" = "no"; then \
|
||||||
|
$(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
|
||||||
|
fi; test -z "$$fail"
|
||||||
|
|
||||||
|
ID: $(am__tagged_files)
|
||||||
|
$(am__define_uniq_tagged_files); mkid -fID $$unique
|
||||||
|
tags: tags-recursive
|
||||||
|
TAGS: tags
|
||||||
|
|
||||||
|
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
|
||||||
|
set x; \
|
||||||
|
here=`pwd`; \
|
||||||
|
if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
|
||||||
|
include_option=--etags-include; \
|
||||||
|
empty_fix=.; \
|
||||||
|
else \
|
||||||
|
include_option=--include; \
|
||||||
|
empty_fix=; \
|
||||||
|
fi; \
|
||||||
|
list='$(SUBDIRS)'; for subdir in $$list; do \
|
||||||
|
if test "$$subdir" = .; then :; else \
|
||||||
|
test ! -f $$subdir/TAGS || \
|
||||||
|
set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \
|
||||||
|
fi; \
|
||||||
|
done; \
|
||||||
|
$(am__define_uniq_tagged_files); \
|
||||||
|
shift; \
|
||||||
|
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
|
||||||
|
test -n "$$unique" || unique=$$empty_fix; \
|
||||||
|
if test $$# -gt 0; then \
|
||||||
|
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
|
||||||
|
"$$@" $$unique; \
|
||||||
|
else \
|
||||||
|
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
|
||||||
|
$$unique; \
|
||||||
|
fi; \
|
||||||
|
fi
|
||||||
|
ctags: ctags-recursive
|
||||||
|
|
||||||
|
CTAGS: ctags
|
||||||
|
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
|
||||||
|
$(am__define_uniq_tagged_files); \
|
||||||
|
test -z "$(CTAGS_ARGS)$$unique" \
|
||||||
|
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
|
||||||
|
$$unique
|
||||||
|
|
||||||
|
GTAGS:
|
||||||
|
here=`$(am__cd) $(top_builddir) && pwd` \
|
||||||
|
&& $(am__cd) $(top_srcdir) \
|
||||||
|
&& gtags -i $(GTAGS_ARGS) "$$here"
|
||||||
|
cscopelist: cscopelist-recursive
|
||||||
|
|
||||||
|
cscopelist-am: $(am__tagged_files)
|
||||||
|
list='$(am__tagged_files)'; \
|
||||||
|
case "$(srcdir)" in \
|
||||||
|
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
|
||||||
|
*) sdir=$(subdir)/$(srcdir) ;; \
|
||||||
|
esac; \
|
||||||
|
for i in $$list; do \
|
||||||
|
if test -f "$$i"; then \
|
||||||
|
echo "$(subdir)/$$i"; \
|
||||||
|
else \
|
||||||
|
echo "$$sdir/$$i"; \
|
||||||
|
fi; \
|
||||||
|
done >> $(top_builddir)/cscope.files
|
||||||
|
|
||||||
|
distclean-tags:
|
||||||
|
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
|
||||||
|
distdir: $(BUILT_SOURCES)
|
||||||
|
$(MAKE) $(AM_MAKEFLAGS) distdir-am
|
||||||
|
|
||||||
|
distdir-am: $(DISTFILES)
|
||||||
|
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
|
||||||
|
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
|
||||||
|
list='$(DISTFILES)'; \
|
||||||
|
dist_files=`for file in $$list; do echo $$file; done | \
|
||||||
|
sed -e "s|^$$srcdirstrip/||;t" \
|
||||||
|
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
|
||||||
|
case $$dist_files in \
|
||||||
|
*/*) $(MKDIR_P) `echo "$$dist_files" | \
|
||||||
|
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
|
||||||
|
sort -u` ;; \
|
||||||
|
esac; \
|
||||||
|
for file in $$dist_files; do \
|
||||||
|
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
|
||||||
|
if test -d $$d/$$file; then \
|
||||||
|
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
|
||||||
|
if test -d "$(distdir)/$$file"; then \
|
||||||
|
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
|
||||||
|
fi; \
|
||||||
|
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
|
||||||
|
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
|
||||||
|
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
|
||||||
|
fi; \
|
||||||
|
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
|
||||||
|
else \
|
||||||
|
test -f "$(distdir)/$$file" \
|
||||||
|
|| cp -p $$d/$$file "$(distdir)/$$file" \
|
||||||
|
|| exit 1; \
|
||||||
|
fi; \
|
||||||
|
done
|
||||||
|
@list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
|
||||||
|
if test "$$subdir" = .; then :; else \
|
||||||
|
$(am__make_dryrun) \
|
||||||
|
|| test -d "$(distdir)/$$subdir" \
|
||||||
|
|| $(MKDIR_P) "$(distdir)/$$subdir" \
|
||||||
|
|| exit 1; \
|
||||||
|
dir1=$$subdir; dir2="$(distdir)/$$subdir"; \
|
||||||
|
$(am__relativize); \
|
||||||
|
new_distdir=$$reldir; \
|
||||||
|
dir1=$$subdir; dir2="$(top_distdir)"; \
|
||||||
|
$(am__relativize); \
|
||||||
|
new_top_distdir=$$reldir; \
|
||||||
|
echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \
|
||||||
|
echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \
|
||||||
|
($(am__cd) $$subdir && \
|
||||||
|
$(MAKE) $(AM_MAKEFLAGS) \
|
||||||
|
top_distdir="$$new_top_distdir" \
|
||||||
|
distdir="$$new_distdir" \
|
||||||
|
am__remove_distdir=: \
|
||||||
|
am__skip_length_check=: \
|
||||||
|
am__skip_mode_fix=: \
|
||||||
|
distdir) \
|
||||||
|
|| exit 1; \
|
||||||
|
fi; \
|
||||||
|
done
|
||||||
|
check-am: all-am
|
||||||
|
check: check-recursive
|
||||||
|
all-am: Makefile
|
||||||
|
installdirs: installdirs-recursive
|
||||||
|
installdirs-am:
|
||||||
|
install: install-recursive
|
||||||
|
install-exec: install-exec-recursive
|
||||||
|
install-data: install-data-recursive
|
||||||
|
uninstall: uninstall-recursive
|
||||||
|
|
||||||
|
install-am: all-am
|
||||||
|
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
|
||||||
|
|
||||||
|
installcheck: installcheck-recursive
|
||||||
|
install-strip:
|
||||||
|
if test -z '$(STRIP)'; then \
|
||||||
|
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
|
||||||
|
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
|
||||||
|
install; \
|
||||||
|
else \
|
||||||
|
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
|
||||||
|
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
|
||||||
|
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
|
||||||
|
fi
|
||||||
|
mostlyclean-generic:
|
||||||
|
|
||||||
|
clean-generic:
|
||||||
|
-test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
|
||||||
|
|
||||||
|
distclean-generic:
|
||||||
|
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
|
||||||
|
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
|
||||||
|
|
||||||
|
maintainer-clean-generic:
|
||||||
|
@echo "This command is intended for maintainers to use"
|
||||||
|
@echo "it deletes files that may require special tools to rebuild."
|
||||||
|
clean: clean-recursive
|
||||||
|
|
||||||
|
clean-am: clean-generic clean-libtool mostlyclean-am
|
||||||
|
|
||||||
|
distclean-am: clean-am distclean-generic distclean-tags
|
||||||
|
|
||||||
|
dvi: dvi-recursive
|
||||||
|
|
||||||
|
dvi-am:
|
||||||
|
|
||||||
|
html: html-recursive
|
||||||
|
|
||||||
|
html-am:
|
||||||
|
|
||||||
|
info: info-recursive
|
||||||
|
|
||||||
|
info-am:
|
||||||
|
|
||||||
|
install-data-am:
|
||||||
|
|
||||||
|
install-dvi: install-dvi-recursive
|
||||||
|
|
||||||
|
install-dvi-am:
|
||||||
|
|
||||||
|
install-exec-am:
|
||||||
|
|
||||||
|
install-html: install-html-recursive
|
||||||
|
|
||||||
|
install-html-am:
|
||||||
|
|
||||||
|
install-info: install-info-recursive
|
||||||
|
|
||||||
|
install-info-am:
|
||||||
|
|
||||||
|
install-man:
|
||||||
|
|
||||||
|
install-pdf: install-pdf-recursive
|
||||||
|
|
||||||
|
install-pdf-am:
|
||||||
|
|
||||||
|
install-ps: install-ps-recursive
|
||||||
|
|
||||||
|
install-ps-am:
|
||||||
|
|
||||||
|
installcheck-am:
|
||||||
|
|
||||||
|
maintainer-clean: maintainer-clean-recursive
|
||||||
|
-rm -f Makefile
|
||||||
|
maintainer-clean-am: distclean-am maintainer-clean-generic
|
||||||
|
|
||||||
|
mostlyclean: mostlyclean-recursive
|
||||||
|
|
||||||
|
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
|
||||||
|
|
||||||
|
pdf: pdf-recursive
|
||||||
|
|
||||||
|
pdf-am:
|
||||||
|
|
||||||
|
ps: ps-recursive
|
||||||
|
|
||||||
|
ps-am:
|
||||||
|
|
||||||
|
uninstall-am:
|
||||||
|
|
||||||
|
.MAKE: $(am__recursive_targets) install-am install-strip
|
||||||
|
|
||||||
|
.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \
|
||||||
|
check-am clean clean-generic clean-libtool cscopelist-am ctags \
|
||||||
|
ctags-am distclean distclean-generic distclean-libtool \
|
||||||
|
distclean-tags distdir dvi dvi-am html html-am info info-am \
|
||||||
|
install install-am install-data install-data-am install-dvi \
|
||||||
|
install-dvi-am install-exec install-exec-am install-html \
|
||||||
|
install-html-am install-info install-info-am install-man \
|
||||||
|
install-pdf install-pdf-am install-ps install-ps-am \
|
||||||
|
install-strip installcheck installcheck-am installdirs \
|
||||||
|
installdirs-am maintainer-clean maintainer-clean-generic \
|
||||||
|
mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \
|
||||||
|
ps ps-am tags tags-am uninstall uninstall-am
|
||||||
|
|
||||||
|
.PRECIOUS: Makefile
|
||||||
|
|
||||||
|
|
||||||
|
all: $(MK_CA_DOCS) $(CURLCONF_DOCS)
|
||||||
|
|
||||||
|
.md.1:
|
||||||
|
$(CD2)$(CD2NROFF)
|
||||||
|
|
||||||
|
curl-config.1: curl-config.md
|
||||||
|
|
||||||
|
mk-ca-bundle.1: mk-ca-bundle.md
|
||||||
|
|
||||||
|
distclean:
|
||||||
|
rm -f $(CLEANFILES)
|
||||||
|
|
||||||
|
# Tell versions [3.59,3.63) of GNU make to not export all variables.
|
||||||
|
# Otherwise a system limit (for SysV at least) may be exceeded.
|
||||||
|
.NOEXPORT:
|
116
src/dependencies/curl-8.8.0/docs/NEW-PROTOCOL.md
Normal file
|
@ -0,0 +1,116 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Adding a new protocol?
|
||||||
|
|
||||||
|
Every once in a while, someone comes up with the idea of adding support for yet
|
||||||
|
another protocol to curl. After all, curl already supports 25-something
|
||||||
|
protocols and it is the Internet transfer machine for the world.
|
||||||
|
|
||||||
|
In the curl project we love protocols and we love supporting many protocols
|
||||||
|
and doing it well.
|
||||||
|
|
||||||
|
How do you proceed to add a new protocol and what are the requirements?
|
||||||
|
|
||||||
|
## No fixed set of requirements
|
||||||
|
|
||||||
|
This document is an attempt to describe things to consider. There is no
|
||||||
|
checklist of the twenty-seven things you need to cross off. We view the entire
|
||||||
|
effort as a whole and then judge if it seems to be the right thing - for now.
|
||||||
|
The more things that look right, fit our patterns and are done in ways that
|
||||||
|
align with our thinking, the better are the chances that we agree that
|
||||||
|
supporting this protocol is a grand idea.
|
||||||
|
|
||||||
|
## Mutual benefit is preferred
|
||||||
|
|
||||||
|
curl is not here for your protocol. Your protocol is not here for curl. The
|
||||||
|
best cooperation and end result occur when all involved parties mutually see
|
||||||
|
and agree that supporting this protocol in curl would be good for everyone.
|
||||||
|
Heck, for the world.
|
||||||
|
|
||||||
|
Consider "selling us" the idea that we need an implementation merged in curl,
|
||||||
|
to be fairly important. *Why* do we want curl to support this new protocol?
|
||||||
|
|
||||||
|
## Protocol requirements
|
||||||
|
|
||||||
|
### Client-side
|
||||||
|
|
||||||
|
The protocol implementation is for a client's side of a "communication
|
||||||
|
session".
|
||||||
|
|
||||||
|
### Transfer oriented
|
||||||
|
|
||||||
|
The protocol itself should be focused on *transfers*. Be it uploads or
|
||||||
|
downloads or both. It should at least be possible to view the transfers as
|
||||||
|
such, like we can view reading emails over POP3 as a download and sending
|
||||||
|
emails over SMTP as an upload.
|
||||||
|
|
||||||
|
If you cannot even shoehorn the protocol into a transfer focused view, then
|
||||||
|
you are up for a tough argument.
|
||||||
|
|
||||||
|
### URL
|
||||||
|
|
||||||
|
There should be a documented URL format. If there is an RFC for it there is no
|
||||||
|
question about it but the syntax does not have to be a published RFC. It could
|
||||||
|
be enough if it is already in use by other implementations.
|
||||||
|
|
||||||
|
If you make up the syntax just in order to be able to propose it to curl, then
|
||||||
|
you are in a bad place. URLs are designed and defined for interoperability.
|
||||||
|
There should at least be a good chance that other clients and servers can be
|
||||||
|
implemented supporting the same URL syntax and work the same or similar way.
|
||||||
|
|
||||||
|
URLs work on registered 'schemes'. There is a register of [all officially
|
||||||
|
recognized
|
||||||
|
schemes](https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml). If
|
||||||
|
your protocol is not in there, is it really a protocol we want?
|
||||||
|
|
||||||
|
### Wide and public use
|
||||||
|
|
||||||
|
The protocol shall already be used or have an expectation of getting used
|
||||||
|
widely. Experimental protocols are better off worked on in experiments first,
|
||||||
|
to prove themselves before they are adopted by curl.
|
||||||
|
|
||||||
|
## Code
|
||||||
|
|
||||||
|
Of course the code needs to be written, provided, licensed agreeably and it
|
||||||
|
should follow our code guidelines and review comments have to be dealt with.
|
||||||
|
If the implementation needs third party code, that third party code should not
|
||||||
|
have noticeably lesser standards than the curl project itself.
|
||||||
|
|
||||||
|
## Tests
|
||||||
|
|
||||||
|
As much of the protocol implementation as possible needs to be verified by
|
||||||
|
curl test cases. We must have the implementation get tested by CI jobs,
|
||||||
|
torture tests and more.
|
||||||
|
|
||||||
|
We have experienced many times in the past how new implementations were brought
|
||||||
|
to curl and immediately once the code had been merged, the originator vanished
|
||||||
|
from the face of the earth. That is fine, but we need to take the necessary
|
||||||
|
precautions so when it happens we are still fine.
|
||||||
|
|
||||||
|
Our test infrastructure is powerful enough to test just about every possible
|
||||||
|
protocol - but it might require a bit of an effort to make it happen.
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
We cannot assume that users are particularly familiar with details and
|
||||||
|
peculiarities of the protocol. It needs documentation.
|
||||||
|
|
||||||
|
Maybe it even needs some internal documentation so that the developers who try
|
||||||
|
to debug something five years from now can figure out functionality a little
|
||||||
|
easier!
|
||||||
|
|
||||||
|
The protocol specification itself should be freely available without requiring
|
||||||
|
a non-disclosure agreement or similar.
|
||||||
|
|
||||||
|
## Do not compare
|
||||||
|
|
||||||
|
We are constantly raising the bar and we are constantly improving the project.
|
||||||
|
A lot of things we did in the past would not be acceptable if done today.
|
||||||
|
Therefore, you might be tempted to use shortcuts or "hacks" you can spot
|
||||||
|
other - existing - protocol implementations have used, but there is nothing to
|
||||||
|
gain from that. The bar has been raised. Former "cheats" may not be tolerated
|
||||||
|
anymore.
|
56
src/dependencies/curl-8.8.0/docs/PARALLEL-TRANSFERS.md
Normal file
@ -0,0 +1,56 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->

# Parallel transfers

curl 7.66.0 introduced support for doing multiple transfers simultaneously, in
parallel.

## -Z, --parallel

When this command line option is used, curl performs the transfers given to it
at the same time. It does up to `--parallel-max` concurrent transfers, with a
default value of 50.

## Progress meter

The progress meter that is displayed when doing parallel transfers is
completely different from the regular one used for each single transfer.

It shows:

 o percent download (if known, which means *all* transfers need to have a
   known size)
 o percent upload (if known, with the same caveat as for download)
 o total amount of downloaded data
 o total amount of uploaded data
 o number of transfers to perform
 o number of concurrent transfers being transferred right now
 o number of transfers queued up waiting to start
 o total time all transfers are expected to take (if sizes are known)
 o current time the transfers have spent so far
 o estimated time left (if sizes are known)
 o current transfer speed (the faster of upload/download speeds measured over
   the last few seconds)

Example:

    DL% UL%  Dled  Uled Xfers Live   Qd Total   Current  Left    Speed
     72  --  37.9G    0   101   30   23 0:00:55 0:00:34 0:00:22  2752M

## Behavior differences

Connections are shared fine between different easy handles, but the
"authentication contexts" are not. For example, when doing HTTP Digest auth
with one handle for a particular transfer and then continuing with another
handle that reuses the same connection, the second handle cannot send the
necessary Authorization header at once since the context is only kept in the
original easy handle.

To fix this, the authorization state could be made possible to share with the
share API as well, as a context per origin + path (realm?) basically.

Visible in test 153, 1412 and more.
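The `-Z` behavior documented in PARALLEL-TRANSFERS.md above is built on
libcurl's multi interface. As a rough, hedged sketch of that mechanism (not the
curl tool's actual code; the URLs are placeholders and error handling is
omitted), two easy handles can be driven concurrently like this:

~~~c
/* Sketch: drive two transfers concurrently with the libcurl multi interface.
 * Illustrative only; real code should check return codes and collect
 * per-transfer results with curl_multi_info_read(). */
#include <curl/curl.h>

int main(void)
{
  const char *urls[2] = { "https://example.com/a", "https://example.com/b" };
  CURL *easy[2];
  CURLM *multi;
  int i, running = 0;

  curl_global_init(CURL_GLOBAL_DEFAULT);
  multi = curl_multi_init();
  for(i = 0; i < 2; i++) {
    easy[i] = curl_easy_init();
    curl_easy_setopt(easy[i], CURLOPT_URL, urls[i]);
    curl_multi_add_handle(multi, easy[i]);
  }

  do {
    curl_multi_perform(multi, &running);            /* progress all transfers */
    if(running)
      curl_multi_poll(multi, NULL, 0, 1000, NULL);  /* wait for activity */
  } while(running);

  for(i = 0; i < 2; i++) {
    curl_multi_remove_handle(multi, easy[i]);
    curl_easy_cleanup(easy[i]);
  }
  curl_multi_cleanup(multi);
  curl_global_cleanup();
  return 0;
}
~~~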
18
src/dependencies/curl-8.8.0/docs/README.md
Normal file
@ -0,0 +1,18 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->



# Documentation

You find a mix of various documentation in this directory and subdirectories,
using several different formats. Some of them are not ideal for reading
directly in your browser.

If you would rather see the rendered version of the documentation, check out the
curl website's [documentation section](https://curl.se/docs/) for
general curl stuff or the [libcurl section](https://curl.se/libcurl/) for
libcurl related documentation.
121
src/dependencies/curl-8.8.0/docs/RELEASE-PROCEDURE.md
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
curl release procedure - how to do a release
|
||||||
|
============================================
|
||||||
|
|
||||||
|
in the source code repo
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
- do a *regular build* with a sensible build config to make sure the
|
||||||
|
`src/tool_hugehelp.c` file etc is correctly generated
|
||||||
|
|
||||||
|
- edit `RELEASE-NOTES` to be accurate
|
||||||
|
|
||||||
|
- update `docs/THANKS`
|
||||||
|
|
||||||
|
- make sure all relevant changes are committed on the master branch
|
||||||
|
|
||||||
|
- tag the git repo in this style: `git tag -a curl-7_34_0`. -a annotates the
|
||||||
|
tag and we use underscores instead of dots in the version number. Make sure
|
||||||
|
the tag is GPG signed (using -s).
|
||||||
|
|
||||||
|
- run `./scripts/dmaketgz 7.34.0` to build the release tarballs.
|
||||||
|
|
||||||
|
- push the git commits and the new tag
|
||||||
|
|
||||||
|
- GPG sign the 4 tarballs as `maketgz` suggests
|
||||||
|
|
||||||
|
- upload the 8 resulting files to the primary download directory
|
||||||
|
|
||||||
|
in the curl-www repo
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
- edit `Makefile` (version number and date),
|
||||||
|
|
||||||
|
- edit `_newslog.html` (announce the new release) and
|
||||||
|
|
||||||
|
- edit `_changes.html` (insert changes+bugfixes from RELEASE-NOTES)
|
||||||
|
|
||||||
|
- commit all local changes
|
||||||
|
|
||||||
|
- tag the repo with the same name as used for the source repo.
|
||||||
|
|
||||||
|
- make sure all relevant changes are committed and pushed on the master branch
|
||||||
|
|
||||||
|
(the website then updates its contents automatically)
|
||||||
|
|
||||||
|
on GitHub
|
||||||
|
---------
|
||||||
|
|
||||||
|
- edit the newly made release tag so that it is listed as the latest release
|
||||||
|
|
||||||
|
inform
|
||||||
|
------
|
||||||
|
|
||||||
|
- send an email to curl-users, curl-announce and curl-library. Insert the
|
||||||
|
RELEASE-NOTES into the mail.
|
||||||
|
|
||||||
|
celebrate
|
||||||
|
---------
|
||||||
|
|
||||||
|
- suitable beverage intake is encouraged for the festivities
|
||||||
|
|
||||||
|
curl release scheduling
|
||||||
|
=======================
|
||||||
|
|
||||||
|
Release Cycle
|
||||||
|
-------------
|
||||||
|
|
||||||
|
We normally do releases every 8 weeks on Wednesdays. If important problems
|
||||||
|
arise, we can insert releases outside the schedule or we can move the release
|
||||||
|
date.
|
||||||
|
|
||||||
|
Each 8 week (56 days) release cycle is divided into three distinct periods:
|
||||||
|
|
||||||
|
- During the first 10 calendar days after a release, we are in "cool down". We
|
||||||
|
do not merge features but only bug-fixes. If a regression is reported, we
|
||||||
|
might do a follow-up patch release.
|
||||||
|
|
||||||
|
- During the following 3 weeks (21 days) there is a feature window: we allow
|
||||||
|
new features and changes to curl and libcurl. If we accept any such changes,
|
||||||
|
we bump the minor number used for the next release.
|
||||||
|
|
||||||
|
- During the next 25 days we are in feature freeze. We do not merge any
|
||||||
|
features or changes, and we only focus on fixing bugs and polishing things
|
||||||
|
to make the pending release a solid one.
|
||||||
|
|
||||||
|
If a future release date happens to end up on a "bad date", like in the middle
|
||||||
|
of common public holidays or when the lead release manager is unavailable, the
|
||||||
|
release date can be moved forwards or backwards a full week. This is then
|
||||||
|
advertised well in advance.
|
||||||
|
|
||||||
|
Critical problems
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
We can break the release cycle and do a patch release at any point if a
|
||||||
|
critical enough problem is reported. There is no exact definition of how to
|
||||||
|
assess such criticality, but if an issue is highly disturbing or has a
|
||||||
|
security impact on a large enough share of the user population it might
|
||||||
|
qualify.
|
||||||
|
|
||||||
|
If you think an issue qualifies, bring it to the curl-library mailing list and
|
||||||
|
push for it.
|
||||||
|
|
||||||
|
Coming dates
|
||||||
|
------------
|
||||||
|
|
||||||
|
Based on the description above, here are some planned release dates (at the
|
||||||
|
time of this writing):
|
||||||
|
|
||||||
|
- May 22, 2024
|
||||||
|
- July 17, 2024
|
||||||
|
- September 11, 2024
|
||||||
|
- November 6, 2024
|
||||||
|
- January 8, 2025
|
||||||
|
- March 5, 2025
|
||||||
|
- April 30, 2025
|
||||||
|
- June 25, 2025
|
28
src/dependencies/curl-8.8.0/docs/RELEASE-TOOLS.md
Normal file
@ -0,0 +1,28 @@
# Release tools used for curl 8.8.0

The following tools and their Debian package version numbers were used to
produce this release tarball.

- autoconf: 2.71-3
- automake: 1:1.16.5-1.3
- libtool: 2.4.7-5
- make: 4.3-4.1
- perl: 5.36.0-7+deb12u1
- git: 1:2.39.2-1.1

# Reproduce the tarball

- Clone the repo and checkout the tag: curl-8_8_0
- Install the same set of tools + versions as listed above

## Do a standard build

- autoreconf -fi
- ./configure [...]
- make

## Generate the tarball with the same timestamp

- export SOURCE_DATE_EPOCH=1716357300
- ./maketgz [version]

17
src/dependencies/curl-8.8.0/docs/ROADMAP.md
Normal file
@ -0,0 +1,17 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->

# curl the next few years - perhaps

Roadmap of things Daniel Stenberg wants to work on next. It is intended to
serve as a guideline for others for information, feedback and possible
participation.

## WebSocket

Agree that it is a good enough API and remove the EXPERIMENTAL label.

##
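For context on the WebSocket roadmap item above: the API carrying the
EXPERIMENTAL label is the curl_ws_send()/curl_ws_recv() pair. A hedged sketch
of connect-only usage follows; the URL is a placeholder, error handling is
omitted, and the interface may still change while it remains experimental.

~~~c
/* Sketch of the experimental WebSocket API referenced in ROADMAP.md.
 * Illustrative only: placeholder URL, no error checking. */
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  size_t sent = 0, rlen = 0;
  const struct curl_ws_frame *meta;
  char buf[256];
  CURL *curl;

  curl_global_init(CURL_GLOBAL_DEFAULT);
  curl = curl_easy_init();
  curl_easy_setopt(curl, CURLOPT_URL, "wss://example.com/echo");
  curl_easy_setopt(curl, CURLOPT_CONNECT_ONLY, 2L);      /* WebSocket mode */
  if(curl_easy_perform(curl) == CURLE_OK) {
    curl_ws_send(curl, "hello", 5, &sent, 0, CURLWS_TEXT);
    curl_ws_recv(curl, buf, sizeof(buf), &rlen, &meta);
    printf("received %u bytes\n", (unsigned int)rlen);
  }
  curl_easy_cleanup(curl);
  curl_global_cleanup();
  return 0;
}
~~~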
31
src/dependencies/curl-8.8.0/docs/RUSTLS.md
Normal file
@ -0,0 +1,31 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->

# Rustls

[Rustls is a TLS backend written in Rust](https://docs.rs/rustls/). Curl can
be built to use it as an alternative to OpenSSL or other TLS backends. We use
the [rustls-ffi C bindings](https://github.com/rustls/rustls-ffi/). This
version of curl depends on version v0.13.0 of rustls-ffi.

# Building with rustls

First, [install Rust](https://rustup.rs/).

Next, check out, build, and install the appropriate version of rustls-ffi:

    % git clone https://github.com/rustls/rustls-ffi -b v0.13.0
    % cd rustls-ffi
    % make
    % make DESTDIR=${HOME}/rustls-ffi-built/ install

Now configure and build curl with rustls:

    % git clone https://github.com/curl/curl
    % cd curl
    % autoreconf -fi
    % ./configure --with-rustls=${HOME}/rustls-ffi-built
    % make
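Once curl has been configured with `--with-rustls` as above, one way to confirm
at runtime which TLS backend libcurl was built with is `curl_version_info()`.
This is only a small verification sketch; the exact contents of the
`ssl_version` string (for example a rustls-ffi version) depend on the build.

~~~c
/* Sketch: report the TLS backend of the libcurl in use, e.g. after a
 * --with-rustls build. The ssl_version string is build-dependent. */
#include <stdio.h>
#include <string.h>
#include <curl/curl.h>

int main(void)
{
  curl_version_info_data *info = curl_version_info(CURLVERSION_NOW);
  printf("libcurl %s, TLS backend: %s\n", info->version,
         info->ssl_version ? info->ssl_version : "none");
  if(info->ssl_version && strstr(info->ssl_version, "rustls"))
    puts("rustls backend detected");
  return 0;
}
~~~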
135
src/dependencies/curl-8.8.0/docs/SECURITY-ADVISORY.md
Normal file
|
@ -0,0 +1,135 @@
|
||||||
|
<!--
|
||||||
|
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: curl
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Anatomy of a curl security advisory
|
||||||
|
|
||||||
|
As described in the [Security Process](https://curl.se/dev/secprocess.html)
|
||||||
|
document, when a security vulnerability has been reported to the project and
|
||||||
|
confirmed, we author an advisory document for the issue. It should ideally
|
||||||
|
be written in cooperation with the reporter to make sure all the angles and
|
||||||
|
details of the problem are gathered and described correctly and succinctly.
|
||||||
|
|
||||||
|
## New document
|
||||||
|
|
||||||
|
A security advisory for curl is created in the `docs/` folder in the
|
||||||
|
[curl-www](https://github.com/curl/curl-www) repository. It should be named
|
||||||
|
`$CVEID.md` where `$CVEID` is the full CVE Id that has been registered for the
|
||||||
|
flaw. Like `CVE-2016-0755`. The `.md` extension of course means that the
|
||||||
|
document is written using markdown.
|
||||||
|
|
||||||
|
The standard way to go about this is to first write the `VULNERABILITY`
|
||||||
|
section for the document, so that there is description of the flaw available,
|
||||||
|
then paste this description into the CVE Id request.
|
||||||
|
|
||||||
|
### `vuln.pm`
|
||||||
|
|
||||||
|
The new issue should be entered at the top of the list in the file `vuln.pm`
|
||||||
|
in the same directory. It holds a large array with all published curl
|
||||||
|
vulnerabilities. All fields should be filled in accordingly, separated by a
|
||||||
|
pipe character (`|`).
|
||||||
|
|
||||||
|
The eleven fields for each CVE in `vuln.pm` are, in order:
|
||||||
|
|
||||||
|
HTML page name, first vulnerable version, last vulnerable version, name of
|
||||||
|
the issue, CVE Id, announce date (`YYYYMMDD`), report to the project date
|
||||||
|
(`YYYYMMDD`), CWE, awarded reward amount (USD), area (single word), C-issue
|
||||||
|
(`-` if not a C issue at all, `OVERFLOW` , `OVERREAD`, `DOUBLE_FREE`,
|
||||||
|
`USE_AFTER_FREE`, `NULL_MISTAKE`, `UNINIT`)
|
||||||
|
|
||||||
|
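
For illustration only, a hypothetical entry following that field order could
look like the line below; every value is made up and does not describe a real
advisory:

~~~
CVE-0000-0000.html|7.70.0|8.7.1|Example issue name|CVE-0000-0000|20240101|20231215|CWE-787: Out-of-bounds Write|0|TLS|OVERFLOW
~~~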
### `Makefile`

The new CVE webpage filename needs to be added in the `Makefile`'s `CVELIST`
macro.

When the markdown is in place and the `Makefile` and `vuln.pm` are updated,
all other files and metadata for all curl advisories and versions get
generated automatically using those files.

## Document format

The easy way is to start with a recent previously published advisory and just
blank out old texts and save it using a new name. Keep the subtitles and
general layout.

Some details and metadata are extracted from this document so it is important
to stick to the existing format.

The first line must be the title of the issue.

### VULNERABILITY

The first subtitle should be `VULNERABILITY`. That should then include a
thorough and detailed description of the flaw, including how it can be
triggered and possibly what might happen if it is triggered or exploited.

### INFO

The next section is `INFO` which adds metadata about the flaw. It
specifically mentions the official CVE Id for the issue and it must list the
CWE Id, starting on its own line. We write CWE identifiers in advisories with
the full (official) explanation on the right side of a colon. Like this:

`CWE-305: Authentication Bypass by Primary Weakness`

### AFFECTED VERSIONS

The third section first lists which versions are affected, then adds
clarity by stressing which versions are *not* affected. A third line adds
information about which specific git commit introduced the vulnerability.

The `Introduced-in` commit should be a full URL that displays the commit, but
should work as a stand-alone commit hash if everything up to the last slash is
cut out.

An example using the correct syntax:

~~~
- Affected versions: curl 7.16.1 to and including 7.88.1
- Not affected versions: curl < 7.16.1 and curl >= 8.0.0
- Introduced-in: https://github.com/curl/curl/commit/2147284cad
~~~

### THE SOLUTION

This section describes and discusses the fix. The only mandatory information
here is the link to the git commit that fixes the problem.

The `Fixed-in` value should be a full URL that displays the commit, but should
work as a stand-alone commit hash if everything up to the last slash is cut
out.

Example:

`- Fixed-in: https://github.com/curl/curl/commit/af369db4d3833272b8ed`

### RECOMMENDATIONS

This section lists the recommended actions for the users in a top to bottom
priority order and should ideally contain three items but no less than two.

The top two are almost always `upgrade curl to version XXX` and `apply the
patch to your local version`.

### TIMELINE

Detail when the report was received by the project, when package distributors
were notified (via the distros mailing list or similar), and when the advisory
and fixed version were released.

### CREDITS

Mention the reporter and patch author at least, then everyone else involved
that you think deserves a mention.

If you want to mention more than one name, separate the names with a comma
(`,`).

~~~
- Reported-by: Full Name
- Patched-by: Full Name
~~~
47
src/dependencies/curl-8.8.0/docs/SPONSORS.md
Normal file
@ -0,0 +1,47 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->

# curl sponsors

A sponsor is someone who donates money or resources to the curl project for no
specific service in return.

curl accepts donations via [GitHub sponsors](https://github.com/sponsors/curl)
and [Open Collective](https://opencollective.com/curl).

An even better way to contribute to the project might be to pay an engineer or
two to spend work hours on curl related tasks.

We promise to use donated funds for things and activities that we believe are
beneficial for the project and its development. That includes but is not
limited to bug-bounties, developer conferences, infrastructure, development,
services and hardware.

Recurring donations above a certain amount of money put the sponsor at a named
sponsor level: **Silver**, **Gold**, **Platinum** or **Top**.

Sponsors on a named level can provide their logo image and preferred URL and
get recognition on the curl website's [sponsor
page](https://curl.se/sponsors.html).

- **Silver Sponsor** at least 100 USD/month
- **Gold Sponsor** at least 500 USD/month
- **Platinum Sponsor** at least 1000 USD/month
- **Top Sponsor** outstanding extra valuable help

## Sponsor requirements

A named level sponsor is entitled to a logo and link on the curl website
assuming the company, brand and link are not deemed unsuitable. The curl team
reserves the right to make that decision at its own discretion.

Sponsors may be denied a website presence if, for example, they are involved
with drugs, gambling, pornography, social media manipulation, etc.

## Past Sponsors

Sponsors that stop paying are considered *Past Sponsors* and are not displayed
on the sponsor page anymore. We thank you for your contributions!
97
src/dependencies/curl-8.8.0/docs/SSL-PROBLEMS.md
Normal file
@ -0,0 +1,97 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->

# SSL problems

First, let's establish that we often refer to TLS and SSL interchangeably as
SSL here. The current protocol is called TLS; it was called SSL a long time
ago.

There are several known reasons why a connection that involves SSL might
fail. This document attempts to detail the most common ones and how to
mitigate them.

## CA certs

CA certs are used to digitally verify the server's certificate. You need a
"ca bundle" for this. See the `SSLCERTS` document for many more details on
this.

## CA bundle missing intermediate certificates

When using said CA bundle to verify a server cert, you may experience
problems if your CA store does not contain the certificates for the
intermediates and the server does not provide them.

The TLS protocol mandates that the intermediate certificates are sent in the
handshake, but as browsers have ways to survive or work around such
omissions, missing intermediates in TLS handshakes still happen and browser
users do not notice.

Browsers work around this problem in two ways: they cache intermediate
certificates from previous transfers and some implement the TLS "AIA"
extension that lets the client explicitly download such certificates on
demand.

## Protocol version

Some broken servers fail to properly handle the protocol version negotiation
that SSL servers are supposed to support. This may cause the connection to
fail completely. Sometimes you may need to explicitly select an SSL version to
use when connecting to make the connection succeed.

An additional complication can be that modern SSL libraries sometimes are
built with support for older SSL and TLS versions disabled!

All versions of SSL and the TLS versions before 1.2 are considered insecure
and should be avoided. Use TLS 1.2 or later.
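
With libcurl, the minimum protocol version can be raised explicitly via
`CURLOPT_SSLVERSION`; the command line tool offers `--tlsv1.2` and related
flags. A minimal sketch, with the URL as a placeholder:

~~~c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

    /* Require at least TLS 1.2 for this transfer; older protocol
       versions are refused even if the server would accept them. */
    curl_easy_setopt(curl, CURLOPT_SSLVERSION,
                     (long)CURL_SSLVERSION_TLSv1_2);

    CURLcode res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return 0;
}
~~~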
## Ciphers

Clients give servers a list of ciphers to select from. If the list does not
include any ciphers the server wants/can use, the connection handshake
fails.

curl has recently disabled the use of a whole bunch of seriously insecure
ciphers from its default set (slightly depending on SSL backend in use).

You may have to explicitly provide an alternative list of ciphers for curl
to use if you need to allow the server to use a weak cipher.
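
With libcurl such a list is set through `CURLOPT_SSL_CIPHER_LIST`; the command
line tool uses `--ciphers`. A minimal sketch where the host and cipher names
are placeholders and the accepted syntax depends on the TLS backend
(OpenSSL-style names shown here):

~~~c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://legacy.example.com/");

    /* Hand the TLS backend an explicit cipher list (backend-specific
       syntax; these are OpenSSL-style cipher names). */
    curl_easy_setopt(curl, CURLOPT_SSL_CIPHER_LIST,
                     "ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA");

    CURLcode res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return 0;
}
~~~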
Note that these weak ciphers are identified as flawed. For example, this
includes symmetric ciphers with less than 128-bit keys and RC4.

Schannel in Windows XP is not able to connect to servers that no longer
support the legacy handshakes and algorithms used by those versions, so we
advise against building curl to use Schannel on really old Windows versions.

Reference: [Prohibiting RC4 Cipher
Suites](https://datatracker.ietf.org/doc/html/draft-popov-tls-prohibiting-rc4-01)

## Allow BEAST

BEAST is the name of a TLS 1.0 attack that surfaced in 2011. When adding means
to mitigate this attack, it turned out that some broken servers out there in
the wild did not work properly with the BEAST mitigation in place.

To make such broken servers work, the --ssl-allow-beast option was
introduced. Exactly as it sounds, it re-introduces the BEAST vulnerability,
but on the other hand it allows curl to connect to that kind of broken
servers.
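
In libcurl the equivalent switch is the `CURLSSLOPT_ALLOW_BEAST` bit of
`CURLOPT_SSL_OPTIONS`. A minimal sketch with a placeholder URL, deliberately
trading the BEAST mitigation for compatibility:

~~~c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://old-server.example.com/");

    /* Re-enable the BEAST-vulnerable behavior for broken servers. */
    curl_easy_setopt(curl, CURLOPT_SSL_OPTIONS,
                     (long)CURLSSLOPT_ALLOW_BEAST);

    CURLcode res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return 0;
}
~~~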
## Disabling certificate revocation checks

Some SSL backends may do certificate revocation checks (CRL, OCSP, etc)
depending on the OS or build configuration. The --ssl-no-revoke option was
introduced in 7.44.0 to disable revocation checking but currently is only
supported for Schannel (the native Windows SSL library), with an exception
in the case of Windows' Untrusted Publishers block list which it seems cannot
be bypassed. This option may have broader support to accommodate other SSL
backends in the future.
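
With libcurl, the same behavior is requested with the `CURLSSLOPT_NO_REVOKE`
bit of `CURLOPT_SSL_OPTIONS` (several `CURLSSLOPT_*` bits can be OR'ed into
that bitmask). A minimal sketch with a placeholder URL, mainly relevant to
Schannel builds as described above:

~~~c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

    /* Skip certificate revocation checks, like --ssl-no-revoke does. */
    curl_easy_setopt(curl, CURLOPT_SSL_OPTIONS,
                     (long)CURLSSLOPT_NO_REVOKE);

    CURLcode res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return 0;
}
~~~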
References:

https://curl.se/docs/ssl-compared.html
160
src/dependencies/curl-8.8.0/docs/SSLCERTS.md
Normal file
@ -0,0 +1,160 @@
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.

SPDX-License-Identifier: curl
-->

SSL Certificate Verification
============================

SSL is TLS
----------

SSL is the old name. It is called TLS these days.

Native SSL
----------

If libcurl was built with Schannel or Secure Transport support (the native SSL
libraries included in Windows and Mac OS X), then this does not apply to
you. Scroll down for details on how the OS-native engines handle SSL
certificates. If you are not sure, then run "curl -V" and read the results. If
the version string includes `Schannel`, then it was built with Schannel
support.

It is about trust
-----------------

This system is about trust. In your local CA certificate store you have certs
from *trusted* Certificate Authorities that you then can use to verify that
the server certificates you see are valid. They are signed by one of the
certificate authorities you trust.

Which certificate authorities do you trust? You can decide to trust the same
set of companies your operating system trusts, or the set that one of the
known browsers trusts. That is basically trust via someone else you trust. You
should just be aware that modern operating systems and browsers are set up to
trust *hundreds* of companies and in recent years several certificate
authorities have been found untrustworthy.

Certificate Verification
------------------------

libcurl performs peer SSL certificate verification by default. This is done
by using a CA certificate store that the SSL library can use to make sure the
peer's server certificate is valid.

If you communicate with HTTPS, FTPS or other TLS-using servers using
certificates in the CA store, you can be sure that the remote server really is
the one it claims to be.

If the remote server uses a self-signed certificate, if you do not install a
CA cert store, if the server uses a certificate signed by a CA that is not
included in the store you use or if the remote host is an impostor
impersonating your favorite site, and you want to transfer files from this
server, do one of the following:

1. Tell libcurl to *not* verify the peer. With libcurl you disable this with
   `curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, FALSE);`

   With the curl command line tool, you disable this with `-k`/`--insecure`.

2. Get a CA certificate that can verify the remote server and use the proper
   option to point out this CA cert for verification when connecting. For
   libcurl hackers: `curl_easy_setopt(curl, CURLOPT_CAINFO, cacert);` (see
   the sketch after this list).

   With the curl command line tool: `--cacert [file]`

3. Add the CA cert for your server to the existing default CA certificate
   store. The default CA certificate store can be changed at compile time with
   the following configure options:

   `--with-ca-bundle=FILE`: use the specified file as the CA certificate
   store. CA certificates need to be concatenated in PEM format into this
   file.

   `--with-ca-path=PATH`: use the specified path as CA certificate store. CA
   certificates need to be stored as individual PEM files in this directory.
   You may need to run c_rehash after adding files there.

   If neither of the two options is specified, configure tries to auto-detect
   a setting. It's also possible to explicitly not set any default store but
   rely on the built-in default the crypto library may provide instead. You
   can achieve that by passing both `--without-ca-bundle` and
   `--without-ca-path` to the configure script.

   If you use Internet Explorer, this is one way to extract the CA cert
   for a particular server:

   - View the certificate by double-clicking the padlock
   - Find out where the CA certificate is kept (Certificate>
     Authority Information Access>URL)
   - Get a copy of the crt file using curl
   - Convert it from crt to PEM using the OpenSSL tool:
     `openssl x509 -inform DER -in yourdownloaded.crt -out outcert.pem -text`
   - Add the `outcert.pem` to the CA certificate store or use it stand-alone
     as described below.

   If you use the `openssl` tool, this is one way to extract the CA cert
   for a particular server:

   - `openssl s_client -showcerts -servername server -connect server:443 > cacert.pem`
   - type "quit", followed by the "ENTER" key
   - The certificate has `BEGIN CERTIFICATE` and `END CERTIFICATE` markers.
   - If you want to see the data in the certificate, you can do: `openssl
     x509 -inform PEM -in certfile -text -out certdata` where `certfile` is
     the cert you extracted from logfile. Look in `certdata`.
   - If you want to trust the certificate, you can add it to your CA
     certificate store or use it stand-alone as described. Just remember that
     the security is no better than the way you obtained the certificate.

4. If you are using the curl command line tool and the TLS backend is not
   Schannel then you can specify your own CA cert file by setting the
   environment variable `CURL_CA_BUNDLE` to the path of your choice.

   If you are using the curl command line tool on Windows, curl searches for
   a CA cert file named "curl-ca-bundle.crt" in these directories and in this
   order:
   1. application's directory
   2. current working directory
   3. Windows System directory (e.g. C:\windows\system32)
   4. Windows Directory (e.g. C:\windows)
   5. all directories along %PATH%

5. Get another CA cert bundle. One option is to extract the one a recent
   Firefox browser uses by running 'make ca-bundle' in the curl build tree
   root, or possibly download a version that was generated this way for you:
   [CA Extract](https://curl.se/docs/caextract.html)
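
As referenced in option 2 above, here is a minimal sketch that keeps peer
verification enabled and points libcurl at a specific CA bundle; the URL and
file path are placeholders:

~~~c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

    /* Verification is on by default; set it explicitly for clarity and
       point libcurl at the CA bundle that can vouch for this server. */
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 1L);
    curl_easy_setopt(curl, CURLOPT_CAINFO, "/path/to/cacert.pem");

    CURLcode res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "verification or transfer failed: %s\n",
              curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return 0;
}
~~~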
Neglecting to use one of the above methods when dealing with a server using a
certificate that is not signed by one of the certificates in the installed CA
certificate store causes SSL to report an error (`certificate verify failed`)
during the handshake and SSL then refuses further communication with that
server.

Certificate Verification with Schannel and Secure Transport
-----------------------------------------------------------

If libcurl was built with Schannel (Microsoft's native TLS engine) or Secure
Transport (Apple's native TLS engine) support, then libcurl still performs
peer certificate verification, but instead of using a CA cert bundle, it uses
the certificates that are built into the OS. These are the same certificates
that appear in the Internet Options control panel (under Windows) or Keychain
Access application (under OS X). Any custom security rules for certificates
are honored.

Schannel runs CRL checks on certificates unless peer verification is disabled.
Secure Transport on iOS runs OCSP checks on certificates unless peer
verification is disabled. Secure Transport on OS X runs either OCSP or CRL
checks on certificates if those features are enabled, and this behavior can be
adjusted in the preferences of Keychain Access.

HTTPS proxy
-----------

Since version 7.52.0, curl can do HTTPS to the proxy separately from the
connection to the server. This TLS connection is handled separately from the
server connection so instead of `--insecure` and `--cacert` to control the
certificate verification, you use `--proxy-insecure` and `--proxy-cacert`.
With these options, you make sure that the TLS connection and the trust of the
proxy can be kept totally separate from the TLS connection to the server.
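
In libcurl the proxy connection has mirror options of its own, such as
`CURLOPT_PROXY_CAINFO` and `CURLOPT_PROXY_SSL_VERIFYPEER`. A minimal sketch,
assuming a build with HTTPS-proxy support; the URLs and CA file path are
placeholders:

~~~c
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

    /* Speak TLS to the proxy itself and verify it against its own CA
       file, independently of how the server connection is verified. */
    curl_easy_setopt(curl, CURLOPT_PROXY, "https://proxy.example.net:443");
    curl_easy_setopt(curl, CURLOPT_PROXY_CAINFO, "/path/to/proxy-ca.pem");
    curl_easy_setopt(curl, CURLOPT_PROXY_SSL_VERIFYPEER, 1L);

    CURLcode res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  return 0;
}
~~~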
3177
src/dependencies/curl-8.8.0/docs/THANKS
Normal file
File diff suppressed because it is too large
Load diff
1401
src/dependencies/curl-8.8.0/docs/TODO
Normal file
File diff suppressed because it is too large
Load diff
Some files were not shown because too many files have changed in this diff