Merge remote-tracking branch 'upstream/master'

Author: 0x676e67
Date:   2025-03-20 23:25:49 +08:00
Commit: 80d7385f78
13 changed files with 417 additions and 31 deletions

View File

@@ -36,7 +36,7 @@ jobs:
id: rust-version
run: echo "::set-output name=version::$(rustc --version)"
- name: Cache cargo index
uses: actions/cache@v1
uses: actions/cache@v4
with:
path: ~/.cargo/registry/index
key: index-${{ runner.os }}-${{ github.run_number }}
@@ -45,14 +45,14 @@ jobs:
- name: Create lockfile
run: cargo generate-lockfile
- name: Cache cargo registry
uses: actions/cache@v1
uses: actions/cache@v4
with:
path: ~/.cargo/registry/cache
key: registry-${{ runner.os }}-${{ steps.rust-version.outputs.version }}-${{ hashFiles('Cargo.lock') }}
- name: Fetch dependencies
run: cargo fetch
- name: Cache target directory
uses: actions/cache@v1
uses: actions/cache@v4
with:
path: target
key: clippy-target-${{ runner.os }}-${{ steps.rust-version.outputs.version }}-${{ hashFiles('Cargo.lock') }}

View File

@@ -1,5 +1,17 @@
4.15.0
- 2025-02-27 Expose API to enable certificate compression. (#241)
- 2025-02-23 Fix lifetimes in ssl::select_next_proto
- 2025-02-23 Revert cmake bump (for now) as it is overly restrictive (#321)
- 2025-02-21 Introduce a builder pattern for SslEchKeys + make set_ech_keys take a reference (#320)
- 2025-02-21 Revert "Refactor!: Remove strict `TokioIo` response requirement from `hyper_boring::v1::HttpsConnector`"
- 2025-02-21 Revert "Refactor!: Introduce a Cargo feature for optional Hyper 0 support"
- 2025-02-21 Address clippy lints
- 2025-02-21 Actually expose SslEchKeys
4.14.0
- 2025-02-19 Bump cmake-rs to improve Mac OS build parallelism
- 2025-02-19 Expose SSL_CTX_set1_ech_keys from SslContextRef
- 2024-01-27 Set CMAKE_BUILD_PARALLEL_LEVEL to available_parallelism
- 2025-02-14 build: Fix the build for 32-bit Linux platform (#312)
- 2024-11-30 Use corresponds macro

View File

@@ -96,10 +96,15 @@ impl Config {
|| self.features.underscore_wildcards;
let patches_required = features_with_patches_enabled && !self.env.assume_patched;
let build_from_sources_required = self.features.fips_link_precompiled || patches_required;
if is_precompiled_native_lib && build_from_sources_required {
panic!("precompiled BoringSSL was provided, so FIPS configuration or optional patches can't be applied");
if is_precompiled_native_lib && patches_required {
println!(
"cargo:warning=precompiled BoringSSL was provided, so patches will be ignored"
);
}
if is_precompiled_native_lib && self.features.fips_link_precompiled {
panic!("precompiled BoringSSL was provided, so FIPS configuration can't be applied");
}
}
}

View File

@@ -756,7 +756,7 @@ fn main() {
"des.h",
"dtls1.h",
"hkdf.h",
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
"hpke.h",
"hmac.h",
"hrss.h",

View File

@@ -19,9 +19,19 @@ rustdoc-args = ["--cfg", "docsrs"]
[features]
# Controlling the build
# Use a FIPS-validated version of boringssl.
# Use a FIPS-validated version of BoringSSL. This feature sets "fips-compat".
fips = ["fips-compat", "boring-sys/fips"]
# Use a FIPS build of BoringSSL, but don't set "fips-compat".
#
# As of BoringSSL commit a430310d6563c0734ddafca7731570dfb683dc19, we no longer
# need to make exceptions for the types of BufLen, ProtosLen, and ValueLen,
# which means the "fips-compat" feature is no longer needed.
#
# TODO(cjpatton) Delete this feature and modify "fips" so that it doesn't imply
# "fips-compat".
fips-no-compat = ["boring-sys/fips"]
# Build with compatibility for the BoringSSL FIPS version, without enabling the
# `fips` feature itself (useful e.g. if `fips-link-precompiled` is used with an
# older BoringSSL version).
@@ -80,3 +90,4 @@ zstd = { workspace = true, optional = true }
[dev-dependencies]
hex = { workspace = true }
rusty-hook = { workspace = true }
brotli = { workspace = true }
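
For downstream users, the practical effect of choosing "fips" versus "fips-no-compat" is the same at link time: the FIPS module is pulled in either way. A minimal sketch (not part of this commit) of how a consumer could confirm that at runtime with the crate's public `boring::fips::enabled()` helper, which the fips.rs test in the next file also exercises:

fn main() {
    // Reports whether the linked BoringSSL was built as the FIPS module,
    // regardless of which Cargo feature variant ("fips" or "fips-no-compat")
    // pulled it in.
    if boring::fips::enabled() {
        println!("BoringSSL FIPS module is active");
    } else {
        println!("running a non-FIPS BoringSSL build");
    }
}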

View File

@@ -14,8 +14,16 @@ pub fn enabled() -> bool {
#[test]
fn is_enabled() {
#[cfg(any(feature = "fips", feature = "fips-link-precompiled"))]
#[cfg(any(
feature = "fips",
feature = "fips-no-compat",
feature = "fips-link-precompiled"
))]
assert!(enabled());
#[cfg(not(any(feature = "fips", feature = "fips-link-precompiled")))]
#[cfg(not(any(
feature = "fips",
feature = "fips-no-compat",
feature = "fips-link-precompiled"
)))]
assert!(!enabled());
}

View File

@@ -61,6 +61,13 @@
//! Note that `BORING_BSSL_PRECOMPILED_BCM_O` is never used, as linking BoringSSL with a precompiled
//! non-FIPS module is not supported.
//!
//! ## Linking with a C++ standard library
//!
//! Recent versions of BoringSSL require some C++ standard library features, so boring needs to link
//! with an STL implementation. This can be controlled using the BORING_BSSL_RUST_CPPLIB variable. If
//! no library is specified, libc++ is used on macOS and iOS, whereas libstdc++ is used on other Unix
//! systems.
//!
//! # Optional patches
//!
//! ## Experimental post-quantum cryptography
@@ -123,7 +130,7 @@ pub mod error;
pub mod ex_data;
pub mod fips;
pub mod hash;
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
pub mod hpke;
pub mod memcmp;
pub mod nid;
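
A rough sketch of the default STL selection described in the doc comment above. The `cpp_runtime_lib` helper and the link-directive plumbing are illustrative rather than the actual boring-sys build code, but `BORING_BSSL_RUST_CPPLIB` and `CARGO_CFG_TARGET_OS` are the real variables involved:

fn cpp_runtime_lib(target_os: &str) -> String {
    match std::env::var("BORING_BSSL_RUST_CPPLIB") {
        // Explicit override, e.g. "c++" or "stdc++".
        Ok(lib) => lib,
        // Defaults described above: libc++ on Apple platforms, libstdc++ on other Unix systems.
        Err(_) => match target_os {
            "macos" | "ios" => "c++".to_string(),
            _ => "stdc++".to_string(),
        },
    }
}

fn main() {
    // In a build script, Cargo exposes the target OS via CARGO_CFG_TARGET_OS,
    // and the chosen runtime is linked with a rustc-link-lib directive.
    let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap_or_default();
    println!("cargo:rustc-link-lib={}", cpp_runtime_lib(&target_os));
}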

View File

@@ -1,10 +1,10 @@
#![forbid(unsafe_op_in_unsafe_fn)]
use super::{
AlpnError, ClientHello, GetSessionPendingError, PrivateKeyMethod, PrivateKeyMethodError,
SelectCertError, SniError, Ssl, SslAlert, SslContext, SslContextRef, SslInfoCallbackAlert,
SslInfoCallbackMode, SslInfoCallbackValue, SslRef, SslSession, SslSessionRef,
SslSignatureAlgorithm, SslVerifyError, SESSION_CTX_INDEX,
AlpnError, CertificateCompressor, ClientHello, GetSessionPendingError, PrivateKeyMethod,
PrivateKeyMethodError, SelectCertError, SniError, Ssl, SslAlert, SslContext, SslContextRef,
SslInfoCallbackAlert, SslInfoCallbackMode, SslInfoCallbackValue, SslRef, SslSession,
SslSessionRef, SslSignatureAlgorithm, SslVerifyError, SESSION_CTX_INDEX,
};
use crate::error::ErrorStack;
use crate::ffi;
@@ -579,3 +579,146 @@ pub(super) unsafe extern "C" fn raw_info_callback<F>(
callback(ssl, SslInfoCallbackMode(mode), value);
}
pub(super) unsafe extern "C" fn raw_ssl_cert_compress<C>(
ssl: *mut ffi::SSL,
out: *mut ffi::CBB,
input: *const u8,
input_len: usize,
) -> ::std::os::raw::c_int
where
C: CertificateCompressor,
{
const {
assert!(C::CAN_COMPRESS);
}
// SAFETY: boring provides valid inputs.
let ssl = unsafe { SslRef::from_ptr_mut(ssl) };
let ssl_context = ssl.ssl_context();
let compressor = ssl_context
.ex_data(SslContext::cached_ex_index::<C>())
.expect("BUG: certificate compression missed");
let input_slice = unsafe { std::slice::from_raw_parts(input, input_len) };
let mut writer = CryptoByteBuilder::from_ptr(out);
if compressor.compress(input_slice, &mut writer).is_err() {
return 0;
}
1
}
pub(super) unsafe extern "C" fn raw_ssl_cert_decompress<C>(
ssl: *mut ffi::SSL,
out: *mut *mut ffi::CRYPTO_BUFFER,
uncompressed_len: usize,
input: *const u8,
input_len: usize,
) -> ::std::os::raw::c_int
where
C: CertificateCompressor,
{
const {
assert!(C::CAN_DECOMPRESS);
}
// SAFETY: boring provides valid inputs.
let ssl = unsafe { SslRef::from_ptr_mut(ssl) };
let ssl_context = ssl.ssl_context();
let compressor = ssl_context
.ex_data(SslContext::cached_ex_index::<C>())
.expect("BUG: certificate compression missed");
let Ok(mut decompression_buffer) = CryptoBufferBuilder::with_capacity(uncompressed_len) else {
return 0;
};
let input_slice = unsafe { std::slice::from_raw_parts(input, input_len) };
if compressor
.decompress(input_slice, decompression_buffer.as_writer())
.is_err()
{
return 0;
}
let Ok(crypto_buffer) = decompression_buffer.build() else {
return 0;
};
unsafe { *out = crypto_buffer };
1
}
struct CryptoByteBuilder<'a>(*mut ffi::CBB, std::marker::PhantomData<&'a [u8]>);
impl CryptoByteBuilder<'_> {
fn from_ptr(ptr: *mut ffi::CBB) -> Self {
Self(ptr, Default::default())
}
}
impl std::io::Write for CryptoByteBuilder<'_> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
let success = unsafe { ffi::CBB_add_bytes(self.0, buf.as_ptr(), buf.len()) == 1 };
if !success {
return Err(std::io::Error::other("CBB_add_bytes failed"));
}
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
let success = unsafe { ffi::CBB_flush(self.0) == 1 };
if !success {
return Err(std::io::Error::other("CBB_flush failed"));
}
Ok(())
}
}
struct CryptoBufferBuilder<'a> {
buffer: *mut ffi::CRYPTO_BUFFER,
cursor: std::io::Cursor<&'a mut [u8]>,
}
impl<'a> CryptoBufferBuilder<'a> {
fn with_capacity(capacity: usize) -> Result<CryptoBufferBuilder<'a>, ErrorStack> {
let mut data: *mut u8 = std::ptr::null_mut();
let buffer = unsafe { crate::cvt_p(ffi::CRYPTO_BUFFER_alloc(&mut data, capacity))? };
Ok(CryptoBufferBuilder {
buffer,
cursor: std::io::Cursor::new(unsafe { std::slice::from_raw_parts_mut(data, capacity) }),
})
}
fn as_writer(&mut self) -> &mut (impl std::io::Write + 'a) {
&mut self.cursor
}
fn build(mut self) -> Result<*mut ffi::CRYPTO_BUFFER, ErrorStack> {
let buffer_capacity = unsafe { ffi::CRYPTO_BUFFER_len(self.buffer) };
if self.cursor.position() != buffer_capacity as u64 {
// Make sure all bytes in the buffer are initialized, as required by BoringSSL.
return Err(ErrorStack::get());
}
unsafe {
let mut result = ptr::null_mut();
ptr::swap(&mut self.buffer, &mut result);
std::mem::forget(self);
Ok(result)
}
}
}
impl Drop for CryptoBufferBuilder<'_> {
fn drop(&mut self) {
if !self.buffer.is_null() {
unsafe {
boring_sys::CRYPTO_BUFFER_free(self.buffer);
}
}
}
}

View File

@@ -108,7 +108,7 @@ pub use self::cert_compression::CertCompressionAlgorithm;
pub use self::connector::{
ConnectConfiguration, SslAcceptor, SslAcceptorBuilder, SslConnector, SslConnectorBuilder,
};
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
pub use self::ech::SslEchKeysRef;
pub use self::error::{Error, ErrorCode, HandshakeError};
@@ -118,7 +118,7 @@ mod callbacks;
#[cfg(feature = "cert-compression")]
mod cert_compression;
mod connector;
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
mod ech;
mod error;
mod mut_only;
@@ -773,14 +773,19 @@ impl SslCurve {
pub const X25519_MLKEM768: SslCurve = SslCurve(ffi::SSL_CURVE_X25519_MLKEM768 as _);
#[cfg(feature = "pq-experimental")]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
pub const X25519_KYBER768_DRAFT00: SslCurve =
SslCurve(ffi::SSL_CURVE_X25519_KYBER768_DRAFT00 as _);
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
pub const X25519_KYBER768_DRAFT00_OLD: SslCurve =
SslCurve(ffi::SSL_CURVE_X25519_KYBER768_DRAFT00_OLD as _);
#[cfg(feature = "pq-experimental")]
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
pub const X25519_KYBER512_DRAFT00: SslCurve =
SslCurve(ffi::SSL_CURVE_X25519_KYBER512_DRAFT00 as _);
#[cfg(feature = "pq-experimental")]
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
pub const P256_KYBER768_DRAFT00: SslCurve = SslCurve(ffi::SSL_CURVE_P256_KYBER768_DRAFT00 as _);
/// Returns the curve name
@@ -813,15 +818,15 @@ impl SslCurve {
ffi::SSL_CURVE_SECP384R1 => Some(ffi::NID_secp384r1),
ffi::SSL_CURVE_SECP521R1 => Some(ffi::NID_secp521r1),
ffi::SSL_CURVE_X25519 => Some(ffi::NID_X25519),
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
ffi::SSL_CURVE_X25519_KYBER768_DRAFT00 => Some(ffi::NID_X25519Kyber768Draft00),
#[cfg(feature = "pq-experimental")]
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
ffi::SSL_CURVE_X25519_KYBER768_DRAFT00_OLD => Some(ffi::NID_X25519Kyber768Draft00Old),
#[cfg(feature = "pq-experimental")]
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
ffi::SSL_CURVE_X25519_KYBER512_DRAFT00 => Some(ffi::NID_X25519Kyber512Draft00),
#[cfg(feature = "pq-experimental")]
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
ffi::SSL_CURVE_P256_KYBER768_DRAFT00 => Some(ffi::NID_P256Kyber768Draft00),
#[cfg(feature = "pq-experimental")]
#[cfg(all(not(feature = "fips"), feature = "pq-experimental"))]
ffi::SSL_CURVE_X25519_MLKEM768 => Some(ffi::NID_X25519MLKEM768),
ffi::SSL_CURVE_DHE2048 => Some(ffi::NID_ffdhe2048),
ffi::SSL_CURVE_DHE3072 => Some(ffi::NID_ffdhe3072),
@@ -851,6 +856,16 @@ impl CompliancePolicy {
Self(ffi::ssl_compliance_policy_t::ssl_compliance_policy_wpa3_192_202304);
}
// IANA-assigned identifier of the compression algorithm. See https://www.rfc-editor.org/rfc/rfc8879.html#name-compression-algorithms
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct CertificateCompressionAlgorithm(u16);
impl CertificateCompressionAlgorithm {
pub const ZLIB: Self = Self(ffi::TLSEXT_cert_compression_zlib as u16);
pub const BROTLI: Self = Self(ffi::TLSEXT_cert_compression_brotli as u16);
}
/// A standard implementation of protocol selection for Application Layer Protocol Negotiation
/// (ALPN).
///
@@ -1566,6 +1581,48 @@ impl SslContextBuilder {
}
}
/// Registers a certificate compression algorithm.
///
/// Corresponds to [`SSL_CTX_add_cert_compression_alg`].
///
/// [`SSL_CTX_add_cert_compression_alg`]: https://commondatastorage.googleapis.com/chromium-boringssl-docs/ssl.h.html#SSL_CTX_add_cert_compression_alg
pub fn add_certificate_compression_algorithm<C>(
&mut self,
compressor: C,
) -> Result<(), ErrorStack>
where
C: CertificateCompressor,
{
const {
assert!(C::CAN_COMPRESS || C::CAN_DECOMPRESS, "Either compression or decompression must be supported for algorithm to be registered")
};
let success = unsafe {
ffi::SSL_CTX_add_cert_compression_alg(
self.as_ptr(),
C::ALGORITHM.0,
const {
if C::CAN_COMPRESS {
Some(callbacks::raw_ssl_cert_compress::<C>)
} else {
None
}
},
const {
if C::CAN_DECOMPRESS {
Some(callbacks::raw_ssl_cert_decompress::<C>)
} else {
None
}
},
) == 1
};
if !success {
return Err(ErrorStack::get());
}
self.replace_ex_data(SslContext::cached_ex_index::<C>(), compressor);
Ok(())
}
/// Configures a custom private key method on the context.
///
/// See [`PrivateKeyMethod`] for more details.
@@ -2006,7 +2063,7 @@ impl SslContextBuilder {
/// ECHConfigs to allow stale DNS caches to update. Unlike most `SSL_CTX` APIs, this function
/// is safe to call even after the `SSL_CTX` has been associated with connections on various
/// threads.
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
#[corresponds(SSL_CTX_set1_ech_keys)]
pub fn set_ech_keys(&self, keys: &SslEchKeys) -> Result<(), ErrorStack> {
unsafe { cvt(ffi::SSL_CTX_set1_ech_keys(self.as_ptr(), keys.as_ptr())).map(|_| ()) }
@@ -2248,7 +2305,7 @@ impl SslContextRef {
/// ECHConfigs to allow stale DNS caches to update. Unlike most `SSL_CTX` APIs, this function
/// is safe to call even after the `SSL_CTX` has been associated with connections on various
/// threads.
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
#[corresponds(SSL_CTX_set1_ech_keys)]
pub fn set_ech_keys(&self, keys: &SslEchKeys) -> Result<(), ErrorStack> {
unsafe { cvt(ffi::SSL_CTX_set1_ech_keys(self.as_ptr(), keys.as_ptr())).map(|_| ()) }
@@ -2765,7 +2822,7 @@ impl SslRef {
if cfg!(feature = "kx-client-nist-required") {
"P256Kyber768Draft00:P-256:P-384:P-521"
} else {
"X25519Kyber768Draft00:X25519:P256Kyber768Draft00:P-256:P-384:P-521"
"X25519MLKEM768:X25519Kyber768Draft00:X25519:P256Kyber768Draft00:P-256:P-384:P-521"
}
} else if cfg!(feature = "kx-client-pq-supported") {
if cfg!(feature = "kx-client-nist-required") {
@@ -4289,6 +4346,36 @@ impl PrivateKeyMethodError {
pub const RETRY: Self = Self(ffi::ssl_private_key_result_t::ssl_private_key_retry);
}
/// Describes a certificate compression algorithm. An implementation MUST support the transformation in at least one direction.
pub trait CertificateCompressor: Send + Sync + 'static {
/// The IANA-assigned identifier of the compression algorithm.
const ALGORITHM: CertificateCompressionAlgorithm;
/// Indicates whether the compressor supports compression.
const CAN_COMPRESS: bool;
/// Indicates whether the compressor supports decompression.
const CAN_DECOMPRESS: bool;
/// Compresses the `input` buffer and writes the compressed data to `output`.
#[allow(unused_variables)]
fn compress<W>(&self, input: &[u8], output: &mut W) -> std::io::Result<()>
where
W: std::io::Write,
{
Err(std::io::Error::other("not implemented"))
}
/// Decompresses the `input` buffer and writes the decompressed data to `output`.
#[allow(unused_variables)]
fn decompress<W>(&self, input: &[u8], output: &mut W) -> std::io::Result<()>
where
W: std::io::Write,
{
Err(std::io::Error::other("not implemented"))
}
}
use crate::ffi::{SSL_CTX_up_ref, SSL_SESSION_get_master_key, SSL_SESSION_up_ref, SSL_is_server};
use crate::ffi::{DTLS_method, TLS_client_method, TLS_method, TLS_server_method};
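
Since `CertificateCompressor` only requires one direction, an algorithm can be registered as decompress-only; `add_certificate_compression_algorithm` then passes `None` for the compress callback. A minimal sketch in the style of the crate-internal tests that follow, where the `PassthroughDecompressor` type and the 0xff01 code point are made up for illustration:

use crate::ssl::{CertificateCompressionAlgorithm, CertificateCompressor, SslContext, SslMethod};

// Illustrative decompress-only codec: it simply copies the peer's bytes through.
struct PassthroughDecompressor;

impl CertificateCompressor for PassthroughDecompressor {
    const ALGORITHM: CertificateCompressionAlgorithm = CertificateCompressionAlgorithm(0xff01);
    const CAN_COMPRESS: bool = false;
    const CAN_DECOMPRESS: bool = true;

    fn decompress<W>(&self, input: &[u8], output: &mut W) -> std::io::Result<()>
    where
        W: std::io::Write,
    {
        output.write_all(input)
    }
}

#[test]
fn decompress_only_registration() {
    let mut builder = SslContext::builder(SslMethod::tls()).unwrap();
    // CAN_COMPRESS is false, so only the decompress callback is installed;
    // the const assertion still passes because one direction is supported.
    builder
        .add_certificate_compression_algorithm(PassthroughDecompressor)
        .unwrap();
    let _ctx = builder.build();
}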

View File

@@ -0,0 +1,102 @@
use std::io::Write as _;
use super::server::Server;
use crate::ssl::CertificateCompressor;
use crate::x509::store::X509StoreBuilder;
use crate::x509::X509;
struct BrotliCompressor {
q: u32,
lgwin: u32,
}
impl Default for BrotliCompressor {
fn default() -> Self {
Self { q: 11, lgwin: 32 }
}
}
impl CertificateCompressor for BrotliCompressor {
const ALGORITHM: crate::ssl::CertificateCompressionAlgorithm =
crate::ssl::CertificateCompressionAlgorithm(1234);
const CAN_COMPRESS: bool = true;
const CAN_DECOMPRESS: bool = true;
fn compress<W>(&self, input: &[u8], output: &mut W) -> std::io::Result<()>
where
W: std::io::Write,
{
let mut writer = brotli::CompressorWriter::new(output, 1024, self.q, self.lgwin);
writer.write_all(input)?;
Ok(())
}
fn decompress<W>(&self, input: &[u8], output: &mut W) -> std::io::Result<()>
where
W: std::io::Write,
{
brotli::BrotliDecompress(&mut std::io::Cursor::new(input), output)?;
Ok(())
}
}
#[test]
fn server_only_cert_compression() {
let mut server_builder = Server::builder();
server_builder
.ctx()
.add_certificate_compression_algorithm(BrotliCompressor::default())
.unwrap();
let server = server_builder.build();
let mut store = X509StoreBuilder::new().unwrap();
let x509 = X509::from_pem(super::ROOT_CERT).unwrap();
store.add_cert(x509).unwrap();
let client = server.client();
client.connect();
}
#[test]
fn client_only_cert_compression() {
let server_builder = Server::builder().build();
let mut store = X509StoreBuilder::new().unwrap();
let x509 = X509::from_pem(super::ROOT_CERT).unwrap();
store.add_cert(x509).unwrap();
let mut client = server_builder.client();
client
.ctx()
.add_certificate_compression_algorithm(BrotliCompressor::default())
.unwrap();
client.connect();
}
#[test]
fn client_and_server_cert_compression() {
let mut server = Server::builder();
server
.ctx()
.add_certificate_compression_algorithm(BrotliCompressor::default())
.unwrap();
let server = server.build();
let mut store = X509StoreBuilder::new().unwrap();
let x509 = X509::from_pem(super::ROOT_CERT).unwrap();
store.add_cert(x509).unwrap();
let mut client = server.client();
client
.ctx()
.add_certificate_compression_algorithm(BrotliCompressor::default())
.unwrap();
client.connect();
}

View File

@@ -21,12 +21,13 @@ use crate::ssl::{
use crate::x509::verify::X509CheckFlags;
use crate::x509::{X509Name, X509};
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
use super::CompliancePolicy;
mod cert_compressor;
mod cert_verify;
mod custom_verify;
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
mod ech;
mod extensions;
mod private_key_method;
@@ -990,7 +991,7 @@ fn test_get_ciphers() {
}
#[test]
#[cfg(not(feature = "fips"))]
#[cfg(not(any(feature = "fips", feature = "fips-no-compat")))]
fn test_set_compliance() {
let mut ctx = SslContext::builder(SslMethod::tls()).unwrap();
ctx.set_compliance_policy(CompliancePolicy::FIPS_202205)

View File

@@ -19,6 +19,16 @@ rustdoc-args = ["--cfg", "docsrs"]
# Use a FIPS-validated version of boringssl.
fips = ["boring/fips", "boring-sys/fips"]
# Use a FIPS build of BoringSSL, but don't set "fips-compat".
#
# As of BoringSSL commit a430310d6563c0734ddafca7731570dfb683dc19, we no longer
# need to make exceptions for the types of BufLen, ProtosLen, and ValueLen,
# which means the "fips-compat" feature is no longer needed.
#
# TODO(cjpatton) Delete this feature and modify "fips" so that it doesn't imply
# "fips-compat".
fips-no-compat = ["boring/fips-no-compat"]
# Link with precompiled FIPS-validated `bcm.o` module.
fips-link-precompiled = ["boring/fips-link-precompiled", "boring-sys/fips-link-precompiled"]