diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5cb4130..05dfef4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,6 +35,8 @@ jobs: steps: - name: checkout source uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: true - name: set up cargo cache uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 continue-on-error: false @@ -82,7 +84,7 @@ jobs: - name: rustdoc # always run if build succeeds if: ${{ !cancelled() && steps.build.outcome == 'success' }} - run: cargo doc --no-deps + run: cargo doc --all-features --no-deps examples-linux: name: Examples (Linux) @@ -90,6 +92,8 @@ jobs: steps: - name: checkout source uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: true - name: set up cargo cache uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 continue-on-error: false @@ -124,6 +128,8 @@ jobs: - name: install command line dependencies run: brew install make gnupg - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + submodules: true - uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b with: toolchain: stable @@ -156,9 +162,9 @@ jobs: - name: disable ipv6 for gpg run: echo "disable-ipv6" > .cache/.gnupg/dirmngr.conf - name: build - run: cargo build + run: cargo build --workspace --all-targets --features "async,vendored" - name: run tests - run: cargo test --workspace + run: cargo test --workspace --features "async,vendored" fmt: name: Rustfmt diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..d4fe729 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "nginx-src/nginx"] + path = nginx-src/nginx + url = https://github.com/nginx/nginx.git + branch = stable-1.28 diff --git a/Cargo.lock b/Cargo.lock index 95382ce..40161e1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -272,12 +272,6 @@ version = "1.15.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -[[package]] -name = "env_home" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" - [[package]] name = "errno" version = "0.3.12" @@ -302,6 +296,12 @@ dependencies = [ "tokio", ] +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "filetime" version = "0.2.25" @@ -556,20 +556,26 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "nginx-src" +version = "1.28.0+1.28.0" +dependencies = [ + "duct", + "flate2", + "tar", + "ureq", +] + [[package]] name = "nginx-sys" version = "0.5.0" dependencies = [ "bindgen", "cc", - "duct", "dunce", "errno", - "flate2", + "nginx-src", "regex", - "tar", - "ureq", - "which", ] [[package]] @@ -581,7 +587,7 @@ dependencies = [ "lock_api", "nginx-sys", "pin-project-lite", - "target-triple", + "tempfile", ] [[package]] @@ -944,10 +950,16 @@ dependencies = [ ] [[package]] -name = "target-triple" -version = "0.1.4" +name = "tempfile" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +dependencies = [ + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] [[package]] name = "tinyvec" @@ -1173,18 +1185,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "which" -version = "7.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" -dependencies = [ - "either", - "env_home", - "rustix", - "winsafe", -] - [[package]] name 
= "windows-core" version = "0.61.2" @@ -1390,12 +1390,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" -[[package]] -name = "winsafe" -version = "0.0.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" - [[package]] name = "xattr" version = "1.5.0" diff --git a/Cargo.toml b/Cargo.toml index a035054..589fc61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] members = [ + "nginx-src", "nginx-sys", "examples", ] @@ -24,6 +25,11 @@ homepage.workspace = true repository.workspace = true rust-version.workspace = true +[package.metadata.docs.rs] +all-features = true +default-target = "x86_64-unknown-linux-gnu" +targets = [] + [dependencies] allocator-api2 = { version = "0.2.21", default-features = false } async-task = { version = "4.7.1", optional = true } @@ -32,7 +38,7 @@ nginx-sys = { path = "nginx-sys", default-features=false, version = "0.5.0"} pin-project-lite = { version = "0.2.16", optional = true } [features] -default = ["std", "vendored"] +default = ["std"] async = [ "alloc", "dep:async-task", @@ -50,14 +56,11 @@ std = [ "alloc", "allocator-api2/std" ] -# Build our own copy of the NGINX by default. -# This could be disabled with `--no-default-features` to minimize the dependency -# tree when building against an existing copy of the NGINX with the -# NGINX_SOURCE_DIR/NGINX_BUILD_DIR variables. +# Build our own copy of the NGINX from `nginx-src` crate. 
+vendored = ["nginx-sys/vendored"] [badges] maintenance = { status = "experimental" } [dev-dependencies] -target-triple = "0.1.2" +tempfile = { version = "3.20.0", default-features = false } diff --git a/build.rs b/build.rs index 1f8c574..d4461d1 100644 --- a/build.rs +++ b/build.rs @@ -53,6 +53,12 @@ fn main() { } } + // Pass build directory to the tests + println!("cargo::rerun-if-env-changed=DEP_NGINX_BUILD_DIR"); + if let Ok(build_dir) = std::env::var("DEP_NGINX_BUILD_DIR") { + println!("cargo::rustc-env=DEP_NGINX_BUILD_DIR={build_dir}"); + } + // Generate required compiler flags if cfg!(target_os = "macos") { // https://stackoverflow.com/questions/28124221/error-linking-with-cc-failed-exit-code-1 diff --git a/deny.toml b/deny.toml index 39f40a6..a2b7132 100644 --- a/deny.toml +++ b/deny.toml @@ -6,8 +6,8 @@ all-features = true [licenses] allow = [ - "Apache-2.0 WITH LLVM-exception", "Apache-2.0", + "BSD-2-Clause", "BSD-3-Clause", "ISC", "MIT", diff --git a/nginx-src/Cargo.toml b/nginx-src/Cargo.toml new file mode 100644 index 0000000..4ac3104 --- /dev/null +++ b/nginx-src/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "nginx-src" +# Version format: <major>.<minor>.<patch>+<NGINX version> +version = "1.28.0+1.28.0" +# Crate sources are licensed under Apache-2.0, with the exception of the +# NGINX submodule that is redistributed under BSD-2-Clause.
+license = "Apache-2.0 AND BSD-2-Clause" +description = "Source of NGINX" +keywords = ["nginx", "module", "sys"] +edition.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true + +[dependencies] +duct = "1" +flate2 = "1" +tar = "0.4" +ureq = "3.0.10" diff --git a/nginx-src/LICENSE b/nginx-src/LICENSE new file mode 120000 index 0000000..ea5b606 --- /dev/null +++ b/nginx-src/LICENSE @@ -0,0 +1 @@ +../LICENSE \ No newline at end of file diff --git a/nginx-src/README.md b/nginx-src/README.md new file mode 100644 index 0000000..334a44d --- /dev/null +++ b/nginx-src/README.md @@ -0,0 +1,82 @@ +# nginx-src + +The crate contains a vendored copy of the NGINX source and the logic to build it. +It is intended to be consumed by the [nginx-sys] crate for CI builds, tests or +rustdoc generation. + +It is notably not intended for producing binaries suitable for production use. +For such scenarios we recommend building the `ngx-rust` based module against +the packages from nginx.org or your preferred distribution. +See the [nginx-sys] documentation for building `ngx-rust` modules against an +existing pre-configured NGINX source tree. + +[nginx-sys]: https://docs.rs/nginx-sys/ + +## Versioning + +This crate follows the latest stable branch of NGINX. + + * The major and minor fields are taken from the NGINX version. + * The patch version is incremented on changes to the build logic or crate + metadata. + * The version metadata contains the full version of NGINX. + +## Build Requirements + +The crate can be built on common Unix-like operating systems and requires all +the usual NGINX build dependencies (including development headers for the +libraries) installed in system paths: + + * C compiler and toolchain + * SSL library, OpenSSL or LibreSSL + * PCRE or PCRE2 + * Zlib or zlib-ng with Zlib compatible API enabled + +We don't intend to support Windows at the moment, as NGINX does not support +dynamic modules for this target.
+ + ## Environment variables + + The following variables can be set to customize the build. + + * `NGX_CONFIGURE_ARGS` — additional arguments to pass to the NGINX configure + script. + + Example: `export NGX_CONFIGURE_ARGS='--with-debug'; cargo build` + + * `NGX_CFLAGS`, `NGX_LDFLAGS` — additional C compiler and linker flags to + pass to the NGINX configure script. Internally, this is added to the + `--with-cc-opt=...` and `--with-ld-opt=...` configure arguments. + + Example: + ```sh + export NGX_CFLAGS='-I/opt/boringssl/include' + export NGX_LDFLAGS='-L/opt/boringssl/build -lstdc++' + cargo build + ``` + + ## Download NGINX and dependency sources during build + + While we recommend using the system libraries, it is still possible to opt into + downloading the NGINX itself and the dependency sources from the network with + the help of the following variables: + + * `NGX_VERSION` — if specified, the version of NGINX to download and build + instead of the one bundled with the crate. + * `OPENSSL_VERSION` — if specified, the version of OpenSSL to download and use + instead of the system-provided library. + * `PCRE2_VERSION` — if specified, the version of PCRE2 to download and use + instead of the system-provided library. + * `ZLIB_VERSION` — if specified, the version of Zlib to download and use + instead of the system-provided library. + + If the `gpg` executable is present in the path, the build script will verify + the integrity of the downloaded files using GPG signatures and a known set of + public keys. + This behavior can be disabled by setting `NGX_NO_SIGNATURE_CHECK`. + + ## License + + The code in this crate is licensed under the [Apache License 2.0](../LICENSE). + The crate also contains the source code of NGINX, distributed under the + [BSD 2-Clause License](https://nginx.org/LICENSE).
diff --git a/nginx-src/nginx b/nginx-src/nginx new file mode 160000 index 0000000..481d28c --- /dev/null +++ b/nginx-src/nginx @@ -0,0 +1 @@ +Subproject commit 481d28cb4e04c8096b9b6134856891dc52ecc68f diff --git a/nginx-src/src/download.rs b/nginx-src/src/download.rs new file mode 100644 index 0000000..a63986e --- /dev/null +++ b/nginx-src/src/download.rs @@ -0,0 +1,256 @@ +extern crate duct; + +use std::error::Error as StdError; +use std::fs::File; +use std::io; +use std::path::{Path, PathBuf}; +use std::sync::LazyLock; +use std::{env, fs}; + +use flate2::read::GzDecoder; +use tar::Archive; + +use crate::verifier::SignatureVerifier; + +const NGINX_URL_PREFIX: &str = "https://nginx.org/download"; +const OPENSSL_URL_PREFIX: &str = "https://github.com/openssl/openssl/releases/download"; +const PCRE1_URL_PREFIX: &str = "https://sourceforge.net/projects/pcre/files/pcre"; +const PCRE2_URL_PREFIX: &str = "https://github.com/PCRE2Project/pcre2/releases/download"; +const ZLIB_URL_PREFIX: &str = "https://github.com/madler/zlib/releases/download"; +const UBUNTU_KEYSEVER: &str = "hkps://keyserver.ubuntu.com"; + +struct SourceSpec<'a> { + url: fn(&str) -> String, + variable: &'a str, + signature: &'a str, + keyserver: &'a str, + key_ids: &'a [&'a str], +} + +const NGINX_SOURCE: SourceSpec = SourceSpec { + url: |version| format!("{NGINX_URL_PREFIX}/nginx-{version}.tar.gz"), + variable: "NGX_VERSION", + signature: "asc", + keyserver: UBUNTU_KEYSEVER, + key_ids: &[ + // Key 1: Konstantin Pavlov's public key. For Nginx 1.25.3 and earlier + "13C82A63B603576156E30A4EA0EA981B66B0D967", + // Key 2: Sergey Kandaurov's public key. For Nginx 1.25.4 + "D6786CE303D9A9022998DC6CC8464D549AF75C0A", + // Key 3: Maxim Dounin's public key. At least used for Nginx 1.18.0 + "B0F4253373F8F6F510D42178520A9993A1C052F8", + // Key 4: Roman Arutyunyan's public key. 
For Nginx 1.25.5 + "43387825DDB1BB97EC36BA5D007C8D7C15D87369", + ], +}; + +const DEPENDENCIES: &[(&str, SourceSpec)] = &[ + ( + "openssl", + SourceSpec { + url: |version| { + if version.starts_with("1.") { + let ver_hyphened = version.replace('.', "_"); + format!("{OPENSSL_URL_PREFIX}/OpenSSL_{ver_hyphened}/openssl-{version}.tar.gz") + } else { + format!("{OPENSSL_URL_PREFIX}/openssl-{version}/openssl-{version}.tar.gz") + } + }, + variable: "OPENSSL_VERSION", + signature: "asc", + keyserver: UBUNTU_KEYSEVER, + key_ids: &[ + "EFC0A467D613CB83C7ED6D30D894E2CE8B3D79F5", + "A21FAB74B0088AA361152586B8EF1A6BA9DA2D5C", + "8657ABB260F056B1E5190839D9C4D26D0E604491", + "B7C1C14360F353A36862E4D5231C84CDDCC69C45", + "95A9908DDFA16830BE9FB9003D30A3A9FF1360DC", + "7953AC1FBC3DC8B3B292393ED5E9E43F7DF9EE8C", + "E5E52560DD91C556DDBDA5D02064C53641C25E5D", + "C1F33DD8CE1D4CC613AF14DA9195C48241FBF7DD", + "BA5473A2B0587B07FB27CF2D216094DFD0CB81EF", + ], + }, + ), + ( + "pcre", + SourceSpec { + url: |version| { + // We can distinguish pcre1/pcre2 by checking whether the second character is '.', + // because the final version of pcre1 is 8.45 and the first one of pcre2 is 10.00. + if version.chars().nth(1).is_some_and(|c| c == '.') { + format!("{PCRE1_URL_PREFIX}/{version}/pcre-{version}.tar.gz") + } else { + format!("{PCRE2_URL_PREFIX}/pcre2-{version}/pcre2-{version}.tar.gz") + } + }, + variable: "PCRE2_VERSION", + signature: "sig", + keyserver: UBUNTU_KEYSEVER, + key_ids: &[ + // Key 1: Phillip Hazel's public key. For PCRE2 10.44 and earlier + "45F68D54BBE23FB3039B46E59766E084FB0F43D8", + // Key 2: Nicholas Wilson's public key. For PCRE2 10.45 + "A95536204A3BB489715231282A98E77EB6F24CA8", + ], + }, + ), + ( + "zlib", + SourceSpec { + url: |version| format!("{ZLIB_URL_PREFIX}/v{version}/zlib-{version}.tar.gz"), + variable: "ZLIB_VERSION", + signature: "asc", + keyserver: UBUNTU_KEYSEVER, + key_ids: &[ + // Key 1: Mark Adler's public key. 
For zlib 1.3.1 and earlier + "5ED46A6721D365587791E2AA783FCD8E58BCAFBA", + ], + }, + ), +]; + +static VERIFIER: LazyLock> = LazyLock::new(|| { + SignatureVerifier::new() + .inspect_err(|err| eprintln!("GnuPG verifier: {err}")) + .ok() +}); + +fn make_cache_dir() -> io::Result { + let base_dir = env::var("CARGO_MANIFEST_DIR") + .map(PathBuf::from) + .unwrap_or_else(|_| env::current_dir().expect("Failed to get current directory")); + // Choose `.cache` relative to the manifest directory (nginx-src) as the default cache directory + // Environment variable `CACHE_DIR` overrides this + // Recommendation: set env "CACHE_DIR = { value = ".cache", relative = true }" in + // `.cargo/config.toml` in your project + let cache_dir = env::var("CACHE_DIR") + .map(PathBuf::from) + .unwrap_or(base_dir.join(".cache")); + if !cache_dir.exists() { + fs::create_dir_all(&cache_dir)?; + } + Ok(cache_dir) +} + +/// Downloads a tarball from the specified URL into the `.cache` directory. +fn download(cache_dir: &Path, url: &str) -> Result> { + fn proceed_with_download(file_path: &Path) -> bool { + // File does not exist or is zero bytes + !file_path.exists() || file_path.metadata().is_ok_and(|m| m.len() < 1) + } + let filename = url.split('/').next_back().unwrap(); + let file_path = cache_dir.join(filename); + if proceed_with_download(&file_path) { + println!("Downloading: {} -> {}", url, file_path.display()); + let mut response = ureq::get(url).call()?; + let mut reader = response.body_mut().as_reader(); + let mut file = File::create(&file_path)?; + std::io::copy(&mut reader, &mut file)?; + } + + if !file_path.exists() { + return Err( + format!("Downloaded file was not written to the expected location: {url}",).into(), + ); + } + Ok(file_path) +} + +/// Gets a given tarball and signature file from a remote URL and copies it to the `.cache` +/// directory. 
+fn get_archive(cache_dir: &Path, source: &SourceSpec, version: &str) -> io::Result { + let archive_url = (source.url)(version); + let archive = download(cache_dir, &archive_url).map_err(io::Error::other)?; + + if let Some(verifier) = &*VERIFIER { + let signature = format!("{archive_url}.{}", source.signature); + + let verify = || -> io::Result<()> { + let signature = download(cache_dir, &signature).map_err(io::Error::other)?; + verifier.import_keys(source.keyserver, source.key_ids)?; + verifier.verify_signature(&archive, &signature)?; + Ok(()) + }; + + if let Err(err) = verify() { + let _ = fs::remove_file(&archive); + let _ = fs::remove_file(&signature); + return Err(err); + } + } + + Ok(archive) +} + +/// Extracts a tarball into a subdirectory based on the tarball's name under the source base +/// directory. +fn extract_archive(archive_path: &Path, extract_output_base_dir: &Path) -> io::Result { + if !extract_output_base_dir.exists() { + fs::create_dir_all(extract_output_base_dir)?; + } + let archive_file = File::open(archive_path) + .unwrap_or_else(|_| panic!("Unable to open archive file: {}", archive_path.display())); + let stem = archive_path + .file_name() + .and_then(|s| s.to_str()) + .and_then(|s| s.rsplitn(3, '.').last()) + .expect("Unable to determine archive file name stem"); + + let extract_output_dir = extract_output_base_dir.to_owned(); + let archive_output_dir = extract_output_dir.join(stem); + if !archive_output_dir.exists() { + Archive::new(GzDecoder::new(archive_file)) + .entries()? + .filter_map(|e| e.ok()) + .for_each(|mut entry| { + let path = entry.path().unwrap(); + let stripped_path = path.components().skip(1).collect::(); + entry + .unpack(archive_output_dir.join(stripped_path)) + .unwrap(); + }); + } else { + println!( + "Archive [{}] already extracted to directory: {}", + stem, + archive_output_dir.display() + ); + } + + Ok(archive_output_dir) +} + +/// Downloads and extracts all requested sources. 
+pub fn prepare(source_dir: &Path, build_dir: &Path) -> io::Result<(PathBuf, Vec)> { + let extract_output_base_dir = build_dir.join("lib"); + if !extract_output_base_dir.exists() { + fs::create_dir_all(&extract_output_base_dir)?; + } + + let cache_dir = make_cache_dir()?; + let mut options = vec![]; + + // Download NGINX only if NGX_VERSION is set. + let source_dir = if let Ok(version) = env::var(NGINX_SOURCE.variable) { + let archive_path = get_archive(&cache_dir, &NGINX_SOURCE, version.as_str())?; + let output_base_dir: PathBuf = env::var("OUT_DIR").unwrap().into(); + extract_archive(&archive_path, &output_base_dir)? + } else { + source_dir.to_path_buf() + }; + + for (name, source) in DEPENDENCIES { + // Download dependencies if a corresponding DEPENDENCY_VERSION is set. + let Ok(requested) = env::var(source.variable) else { + continue; + }; + + let archive_path = get_archive(&cache_dir, source, &requested)?; + let output_dir = extract_archive(&archive_path, &extract_output_base_dir)?; + let output_dir = output_dir.to_string_lossy(); + options.push(format!("--with-{name}={output_dir}")); + } + + Ok((source_dir, options)) +} diff --git a/nginx-src/src/lib.rs b/nginx-src/src/lib.rs new file mode 100644 index 0000000..a606196 --- /dev/null +++ b/nginx-src/src/lib.rs @@ -0,0 +1,226 @@ +#![doc = include_str!("../README.md")] +#![warn(missing_docs)] + +use std::ffi::OsString; +use std::path::{Path, PathBuf}; +use std::process::Output; +use std::{env, io, thread}; + +mod download; +mod verifier; + +static NGINX_DEFAULT_SOURCE_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/nginx"); + +const NGINX_BUILD_INFO: &str = "last-build-info"; +const NGINX_BINARY: &str = "nginx"; + +static NGINX_CONFIGURE_BASE: &[&str] = &[ + "--with-compat", + "--with-http_realip_module", + "--with-http_ssl_module", + "--with-http_v2_module", + "--with-stream", + "--with-stream_realip_module", + "--with-stream_ssl_module", + "--with-threads", +]; + +const ENV_VARS_TRIGGERING_RECOMPILE: [&str; 
10] = [ + "CACHE_DIR", + "CARGO_MANIFEST_DIR", + "CARGO_TARGET_TMPDIR", + "NGX_CONFIGURE_ARGS", + "NGX_CFLAGS", + "NGX_LDFLAGS", + "NGX_VERSION", + "OPENSSL_VERSION", + "PCRE2_VERSION", + "ZLIB_VERSION", +]; + +/* +########################################################################### +# NGINX Build Functions - Everything below here is for building NGINX # +########################################################################### + +In order to build Rust bindings for NGINX using the bindgen crate, we need +to do the following: + + 1. Obtain a copy of the NGINX source code and the necessary dependencies: + OpenSSL, PCRE2, Zlib. + 3. Run autoconf `configure` for NGINX. + 4. Compile NGINX. + 5. Read the autoconf generated makefile for NGINX and configure bindgen + to generate Rust bindings based on the includes in the makefile. +*/ + +/// Outputs cargo instructions required for using this crate from a buildscript. +pub fn print_cargo_metadata() { + for file in ["lib.rs", "download.rs", "verifier.rs"] { + println!( + "cargo::rerun-if-changed={path}/src/{file}", + path = env!("CARGO_MANIFEST_DIR") + ) + } + + for var in ENV_VARS_TRIGGERING_RECOMPILE { + println!("cargo::rerun-if-env-changed={var}"); + } +} + +/// Builds a copy of NGINX sources, either bundled with the crate or downloaded from the network. 
+pub fn build(build_dir: impl AsRef) -> io::Result<(PathBuf, PathBuf)> { + let source_dir = PathBuf::from(NGINX_DEFAULT_SOURCE_DIR); + let build_dir = build_dir.as_ref().to_owned(); + + let (source_dir, vendored_flags) = download::prepare(&source_dir, &build_dir)?; + + let flags = nginx_configure_flags(&vendored_flags); + + configure(&source_dir, &build_dir, &flags)?; + + make(&source_dir, &build_dir, ["build"])?; + + Ok((source_dir, build_dir)) +} + +/// Returns the options NGINX was built with +fn build_info(source_dir: &Path, configure_flags: &[String]) -> String { + // Flags should contain strings pointing to OS/platform as well as dependency versions, + // so if any of that changes, it can trigger a rebuild + format!("{:?}|{}", source_dir, configure_flags.join(" ")) +} + +/// Generate the flags to use with autoconf `configure` for NGINX. +fn nginx_configure_flags(vendored: &[String]) -> Vec { + let mut nginx_opts: Vec = NGINX_CONFIGURE_BASE + .iter() + .map(|x| String::from(*x)) + .collect(); + + nginx_opts.extend(vendored.iter().map(Into::into)); + + if let Ok(extra_args) = env::var("NGX_CONFIGURE_ARGS") { + // FIXME: shell style split? + nginx_opts.extend(extra_args.split_whitespace().map(Into::into)); + } + + if let Ok(cflags) = env::var("NGX_CFLAGS") { + nginx_opts.push(format!("--with-cc-opt={cflags}")); + } + + if let Ok(ldflags) = env::var("NGX_LDFLAGS") { + nginx_opts.push(format!("--with-ld-opt={ldflags}")); + } + + nginx_opts +} + +/// Runs external process invoking autoconf `configure` for NGINX. 
+fn configure(source_dir: &Path, build_dir: &Path, flags: &[String]) -> io::Result<()> { + let build_info = build_info(source_dir, flags); + + if build_dir.join("Makefile").is_file() + && build_dir.join(NGINX_BINARY).is_file() + && matches!( + std::fs::read_to_string(build_dir.join(NGINX_BUILD_INFO)).map(|x| x == build_info), + Ok(true) + ) + { + println!("Build info unchanged, skipping configure"); + return Ok(()); + } + + println!("Using NGINX source at {source_dir:?}"); + + let configure = ["configure", "auto/configure"] + .into_iter() + .map(|x| source_dir.join(x)) + .find(|x| x.is_file()) + .ok_or(io::ErrorKind::NotFound)?; + + println!( + "Running NGINX configure script with flags: {:?}", + flags.join(" ") + ); + + let mut build_dir_arg: OsString = "--builddir=".into(); + build_dir_arg.push(build_dir); + + let mut flags: Vec = flags.iter().map(|x| x.into()).collect(); + flags.push(build_dir_arg); + + let output = duct::cmd(configure, flags) + .dir(source_dir) + .stderr_to_stdout() + .run()?; + + if !output.status.success() { + println!("configure failed with {:?}", output.status); + return Err(io::ErrorKind::Other.into()); + } + + let _ = std::fs::write(build_dir.join(NGINX_BUILD_INFO), build_info); + + Ok(()) +} + +/// Runs `make` within the NGINX source directory as an external process. +fn make(source_dir: &Path, build_dir: &Path, extra_args: U) -> io::Result +where + U: IntoIterator, + U::Item: Into, +{ + // Level of concurrency to use when building nginx - cargo nicely provides this information + let num_jobs = match env::var("NUM_JOBS") { + Ok(s) => s.parse::().ok(), + Err(_) => thread::available_parallelism().ok().map(|n| n.get()), + } + .unwrap_or(1); + + let mut args = vec![ + OsString::from("-f"), + build_dir.join("Makefile").into(), + OsString::from("-j"), + num_jobs.to_string().into(), + ]; + args.extend(extra_args.into_iter().map(Into::into)); + + // Use MAKE passed from the parent process if set. 
Otherwise prefer `gmake` as it provides a + // better feature-wise implementation on some systems. + // Notably, we want to avoid SUN make on Solaris (does not support -j) or ancient GNU make 3.81 + // on MacOS. + let inherited = env::var("MAKE"); + let make_commands: &[&str] = match inherited { + Ok(ref x) => &[x.as_str(), "gmake", "make"], + _ => &["gmake", "make"], + }; + + // Give preference to the binary with the name of gmake if it exists because this is typically + // the GNU 4+ on MacOS (if it is installed via homebrew). + for make in make_commands { + /* Use the duct dependency here to merge the output of STDOUT and STDERR into a single stream, + and to provide the combined output as a reader which can be iterated over line-by-line. We + use duct to do this because it is a lot of work to implement this from scratch. */ + let result = duct::cmd(*make, &args) + .dir(source_dir) + .stderr_to_stdout() + .run(); + + match result { + Err(err) if err.kind() == io::ErrorKind::NotFound => { + eprintln!("make: command '{make}' not found"); + continue; + } + Ok(out) if !out.status.success() => { + return Err(io::Error::other(format!( + "make: '{}' failed with {:?}", + make, out.status + ))); + } + _ => return result, + } + } + + Err(io::ErrorKind::NotFound.into()) +} diff --git a/nginx-src/src/verifier.rs b/nginx-src/src/verifier.rs new file mode 100644 index 0000000..a576262 --- /dev/null +++ b/nginx-src/src/verifier.rs @@ -0,0 +1,121 @@ +use std::ffi::OsString; +use std::fs::{self, Permissions}; +use std::os::unix::fs::PermissionsExt; +use std::path::{Path, PathBuf}; +use std::{env, io}; + +static GNUPG_COMMAND: &str = "gpg"; + +pub struct SignatureVerifier { + gnupghome: PathBuf, +} + +impl SignatureVerifier { + pub fn new() -> io::Result { + if env::var("NGX_NO_SIGNATURE_CHECK").is_ok() { + return Err(io::Error::other( + "signature check disabled by user".to_string(), + )); + }; + + if let Err(x) = duct::cmd!(GNUPG_COMMAND, "--version").stdout_null().run() { + 
return Err(io::Error::other(format!( + "signature check disabled: \"{GNUPG_COMMAND}\" not found ({x})" + ))); + } + + // We do not want to mess with the default gpg data for the running user, + // so we store all gpg data within our build directory. + let gnupghome = env::var("OUT_DIR") + .map(PathBuf::from) + .map_err(io::Error::other)? + .join(".gnupg"); + + if !fs::exists(&gnupghome)? { + fs::create_dir_all(&gnupghome)?; + } + + change_permissions_recursively(gnupghome.as_path(), 0o700, 0o600)?; + + Ok(Self { gnupghome }) + } + + /// Imports all the required GPG keys into a temporary directory in order to + /// validate the integrity of the downloaded tarballs. + pub fn import_keys(&self, server: &str, key_ids: &[&str]) -> io::Result<()> { + println!( + "Importing {} GPG keys for key server: {}", + key_ids.len(), + server + ); + + let mut args = vec![ + OsString::from("--homedir"), + self.gnupghome.clone().into(), + OsString::from("--keyserver"), + server.into(), + OsString::from("--recv-keys"), + ]; + args.extend(key_ids.iter().map(OsString::from)); + + let cmd = duct::cmd(GNUPG_COMMAND, &args); + let output = cmd.stderr_to_stdout().stdout_capture().unchecked().run()?; + + if !output.status.success() { + eprintln!("{}", String::from_utf8_lossy(&output.stdout)); + return Err(io::Error::other(format!( + "Command: {:?}\nFailed to import GPG keys: {}", + cmd, + key_ids.join(" ") + ))); + } + + Ok(()) + } + + /// Validates the integrity of a file against the cryptographic signature associated with + /// the file. 
+ pub fn verify_signature(&self, path: &Path, signature: &Path) -> io::Result<()> { + let cmd = duct::cmd!( + GNUPG_COMMAND, + "--homedir", + &self.gnupghome, + "--verify", + signature, + path + ); + let output = cmd.stderr_to_stdout().stdout_capture().unchecked().run()?; + if !output.status.success() { + eprintln!("{}", String::from_utf8_lossy(&output.stdout)); + return Err(io::Error::other(format!( + "Command: {:?}\nGPG signature verification of archive failed [{}]", + cmd, + path.display() + ))); + } + Ok(()) + } +} + +fn change_permissions_recursively( + path: &Path, + dir_mode: u32, + file_mode: u32, +) -> std::io::Result<()> { + if path.is_dir() { + // Set directory permissions to 700 + fs::set_permissions(path, Permissions::from_mode(dir_mode))?; + + for entry in fs::read_dir(path)? { + let entry = entry?; + let path = entry.path(); + + change_permissions_recursively(&path, dir_mode, file_mode)?; + } + } else { + // Set file permissions to 600 + fs::set_permissions(path, Permissions::from_mode(file_mode))?; + } + + Ok(()) +} diff --git a/nginx-sys/Cargo.toml b/nginx-sys/Cargo.toml index 7bfc6e9..8780749 100644 --- a/nginx-sys/Cargo.toml +++ b/nginx-sys/Cargo.toml @@ -14,6 +14,11 @@ homepage.workspace = true repository.workspace = true rust-version.workspace = true +[package.metadata.docs.rs] +all-features = true +default-target = "x86_64-unknown-linux-gnu" +targets = [] + [dependencies] [target.'cfg(not(windows))'.dependencies] @@ -22,13 +27,9 @@ errno = { version = "0.3", default-features = false } [build-dependencies] bindgen = "0.71" cc = "1.2.0" -duct = { version = "1", optional = true } dunce = "1.0.5" -flate2 = { version = "1.0.28", optional = true } regex = "1.11.1" -tar = { version = "0.4.40", optional = true } -ureq = { version = "3.0.10", optional = true } -which = { version = "7.0.0", optional = true } +nginx-src = { version = "~1.28.0", optional = true, path = "../nginx-src" } [features] -vendored = ["dep:which", "dep:duct", "dep:ureq", 
"dep:flate2", "dep:tar"] +vendored = ["dep:nginx-src"] diff --git a/nginx-sys/README.md b/nginx-sys/README.md index 73b1a69..95e5c7f 100644 --- a/nginx-sys/README.md +++ b/nginx-sys/README.md @@ -26,6 +26,7 @@ patches applied. - `vendored`: Enables the build scripts to download and build a copy of nginx source and link against it. + See `nginx-src` crate documentation for additional details. ## Input variables diff --git a/nginx-sys/build/main.rs b/nginx-sys/build/main.rs index 6303693..a206dee 100644 --- a/nginx-sys/build/main.rs +++ b/nginx-sys/build/main.rs @@ -6,9 +6,6 @@ use std::fs::{read_to_string, File}; use std::io::Write; use std::path::{Path, PathBuf}; -#[cfg(feature = "vendored")] -mod vendored; - const ENV_VARS_TRIGGERING_RECOMPILE: &[&str] = &["OUT_DIR", "NGINX_BUILD_DIR", "NGINX_SOURCE_DIR"]; /// The feature flags set by the nginx configuration script. @@ -135,8 +132,11 @@ impl NginxSource { #[cfg(feature = "vendored")] pub fn from_vendored() -> Self { - let build_dir = vendored::build().expect("vendored build"); - let source_dir = build_dir.parent().expect("source directory").to_path_buf(); + nginx_src::print_cargo_metadata(); + + let out_dir = env::var("OUT_DIR").unwrap(); + let build_dir = PathBuf::from(out_dir).join("objs"); + let (source_dir, build_dir) = nginx_src::build(build_dir).expect("nginx-src build"); Self { source_dir, @@ -207,7 +207,7 @@ fn generate_binding(nginx: &NginxSource) { .map(|path| format!("-I{}", path.to_string_lossy())) .collect(); - print_cargo_metadata(&includes).expect("cargo dependency metadata"); + print_cargo_metadata(nginx, &includes).expect("cargo dependency metadata"); // bindgen targets the latest known stable by default let rust_target: bindgen::RustTarget = env::var("CARGO_PKG_RUST_VERSION") @@ -294,7 +294,10 @@ fn parse_includes_from_makefile(nginx_autoconf_makefile_path: &PathBuf) -> Vec

>(includes: &[T]) -> Result<(), Box> { +pub fn print_cargo_metadata>( + nginx: &NginxSource, + includes: &[T], +) -> Result<(), Box> { // Unquote and merge C string constants let unquote_re = regex::Regex::new(r#""(.*?[^\\])"\s*"#).unwrap(); let unquote = |data: &str| -> String { @@ -334,6 +337,11 @@ pub fn print_cargo_metadata>(includes: &[T]) -> Result<(), Box Result> { - println!("Building NGINX"); - // Create .cache directory - let cache_dir = make_cache_dir()?; - println!("Cache directory created"); - // Import GPG keys used to verify dependency tarballs - import_gpg_keys(&cache_dir)?; - println!("GPG keys imported"); - // Ensure GPG directory has the correct permissions, if gpg is available - if gpg_path().is_some() { - ensure_gpg_permissions(&cache_dir)?; - } - println!("Verified GPG permissions"); - // Configure and Compile NGINX - let (_nginx_install_dir, nginx_src_dir) = compile_nginx(&cache_dir)?; - // Hint cargo to rebuild if any of the these environment variables values change - // because they will trigger a recompilation of NGINX with different parameters - for var in ENV_VARS_TRIGGERING_RECOMPILE { - println!("cargo:rerun-if-env-changed={var}"); - } - println!("cargo:rerun-if-changed=build/vendored.rs"); - - Ok(nginx_src_dir.join("objs")) -} - -/* -########################################################################### -# NGINX Build Functions - Everything below here is for building NGINX # -########################################################################### - -In order to build Rust bindings for NGINX using the bindgen crate, we need -to do the following: - - 1. Download NGINX source code and all dependencies (zlib, pcre2, openssl) - 2. Verify the integrity of the downloaded files using GPG signatures - 3. Extract the downloaded files - 4. Run autoconf `configure` for NGINX - 5. Compile NGINX - 6. Install NGINX in a subdirectory of the project - 7. 
Read the autoconf generated makefile for NGINX and configure bindgen - to generate Rust bindings based on the includes in the makefile. - -Additionally, we want to provide the following features as part of the -build process: - * Allow the user to specify the version of NGINX to build - * Allow the user to specify the version of each dependency to build - * Only reconfigure and recompile NGINX if any of the above versions - change or the configuration flags change (like enabling or disabling - the debug mode) - * Not rely on the user having NGINX dependencies installed on their - system (zlib, pcre2, openssl) - * Keep source code and binaries confined to a subdirectory of the - project to avoid having to track files outside of the project - * If GPG is not installed, the build will still continue. However, the - integrity of the downloaded files will not be verified. -*/ - -fn zlib_archive_url(version: &String) -> String { - format!("{ZLIB_DOWNLOAD_URL_PREFIX}/v{version}/zlib-{version}.tar.gz") -} - -fn pcre_archive_url(version: &String) -> String { - // We can distinguish pcre1/pcre2 by checking whether the second character is '.', because the - // final version of pcre1 is 8.45 and the first one of pcre2 is 10.00. 
- if version.chars().nth(1).is_some_and(|c| c == '.') { - format!("{PCRE1_DOWNLOAD_URL_PREFIX}/{version}/pcre-{version}.tar.gz") - } else { - format!("{PCRE2_DOWNLOAD_URL_PREFIX}/pcre2-{version}/pcre2-{version}.tar.gz") - } -} - -fn openssl_archive_url(version: &String) -> String { - if version.starts_with("1.1.1") { - let version_hyphened = version.replace('.', "_"); - format!("{OPENSSL_DOWNLOAD_URL_PREFIX}/OpenSSL_{version_hyphened}/openssl-{version}.tar.gz") - } else { - format!("{OPENSSL_DOWNLOAD_URL_PREFIX}/openssl-{version}/openssl-{version}.tar.gz") - } -} - -fn nginx_archive_url(version: &String) -> String { - format!("{NGX_DOWNLOAD_URL_PREFIX}/nginx-{version}.tar.gz") -} - -/// Returns a list of tuples containing the URL to a tarball archive and the GPG signature used -/// to validate the integrity of the tarball. -fn all_archives() -> Vec<(String, String)> { - let ngx_version = env::var("NGX_VERSION").unwrap_or_else(|_| NGX_DEFAULT_VERSION.into()); - let zlib_version = env::var("ZLIB_VERSION").unwrap_or_else(|_| ZLIB_DEFAULT_VERSION.into()); - // While Nginx 1.22.0 and later support pcre2 and openssl3, earlier ones only support pcre1 and - // openssl1. Here provides the appropriate (and as latest as possible) versions of these two - // dependencies as default, switching `***[major_version]_DEFAULT_VERSION` based on - // `is_after_1_22`. This facilitates to compile backport versions targeted for Nginx ealier than - // 1.22.0, which are still used in LTS releases of major Linux distributions. 
- let ngx_version_vec: Vec = ngx_version - .split('.') - .map(|s| s.parse().unwrap_or(-1)) - .collect(); - let is_after_1_22 = (ngx_version_vec.len() >= 2) - && (ngx_version_vec[0] > 1 || (ngx_version_vec[0] == 1 && ngx_version_vec[1] >= 22)); - // keep env name `PCRE2_VERSION` for compat - let pcre_version = env::var("PCRE2_VERSION").unwrap_or_else(|_| { - if is_after_1_22 { - PCRE2_DEFAULT_VERSION - } else { - PCRE1_DEFAULT_VERSION - } - .into() - }); - let openssl_version = env::var("OPENSSL_VERSION").unwrap_or_else(|_| { - if is_after_1_22 { - OPENSSL3_DEFAULT_VERSION - } else { - OPENSSL1_DEFAULT_VERSION - } - .into() - }); - fn url_pair(tar_url: String, pgp_ext: &str) -> (String, String) { - (tar_url.clone(), format!("{tar_url}.{pgp_ext}")) - } - vec![ - url_pair(zlib_archive_url(&zlib_version), "asc"), - url_pair(pcre_archive_url(&pcre_version), "sig"), - url_pair(openssl_archive_url(&openssl_version), "asc"), - url_pair(nginx_archive_url(&ngx_version), "asc"), - ] -} - -fn gpg_path() -> Option { - which::which("gpg").ok() -} - -/// Returns the base path to extract tarball contents into -fn source_output_dir(cache_dir: &Path, target: &str) -> PathBuf { - env::var("CARGO_TARGET_TMPDIR") - .map(PathBuf::from) - .unwrap_or_else(|_| cache_dir.join("src").join(target)) -} - -#[allow(clippy::ptr_arg)] -/// Returns the path to install NGINX to -fn nginx_install_dir(base_dir: &Path, target: &str) -> PathBuf { - let nginx_version = env::var("NGX_VERSION").unwrap_or_else(|_| NGX_DEFAULT_VERSION.to_string()); - base_dir.join(nginx_version).join(target) -} - -/// Ensure the correct permissions are applied to the local gnupg directory -fn ensure_gpg_permissions(cache_dir: &Path) -> Result<(), Box> { - fn change_permissions_recursively( - path: &Path, - dir_mode: u32, - file_mode: u32, - ) -> std::io::Result<()> { - if path.is_dir() { - // Set directory permissions to 700 - fs::set_permissions(path, Permissions::from_mode(dir_mode))?; - - for entry in fs::read_dir(path)? 
{ - let entry = entry?; - let path = entry.path(); - - change_permissions_recursively(&path, dir_mode, file_mode)?; - } - } else { - // Set file permissions to 600 - fs::set_permissions(path, Permissions::from_mode(file_mode))?; - } - - Ok(()) - } - - let gnupghome = cache_dir.join(".gnupg"); - change_permissions_recursively(gnupghome.as_path(), 0o700, 0o600) - .map_err(|e| Box::new(e) as Box) -} - -/// Iterates through the tuples in `ALL_SERVERS_AND_PUBLIC_KEY_IDS` and returns a map of -/// key servers to public key IDs. -fn keys_indexed_by_key_server() -> HashMap> { - let mut map: HashMap> = HashMap::new(); - - for tuple in ALL_SERVERS_AND_PUBLIC_KEY_IDS { - let key = tuple.0.to_string(); - let value: Vec = tuple.1.split_whitespace().map(|s| s.to_string()).collect(); - match map.get_mut(&key) { - Some(keys) => keys.extend(value), - None => { - map.insert(key, value); - } - } - } - - map -} - -/// Imports all the required GPG keys into the `.cache/.gnupu` directory in order to -/// validate the integrity of the downloaded tarballs. -fn import_gpg_keys(cache_dir: &Path) -> Result<(), Box> { - if let Some(gpg) = gpg_path() { - // We do not want to mess with the default gpg data for the running user, - // so we store all gpg data with our cache directory. 
- let gnupghome = cache_dir.join(".gnupg"); - if !gnupghome.exists() { - fs::create_dir_all(&gnupghome)?; - } - ensure_gpg_permissions(cache_dir)?; - - for (server, key_ids) in keys_indexed_by_key_server() { - println!( - "Importing {} GPG keys for key server: {}", - key_ids.len(), - server - ); - - let homedir = gnupghome.clone(); - let homedir_str = homedir.to_string_lossy().to_string(); - let base_args = vec![ - "--homedir", - homedir_str.as_str(), - "--keyserver", - server.as_str(), - "--recv-keys", - ]; - let key_ids_str = key_ids.iter().map(|s| s.as_str()).collect::>(); - let args = [base_args, key_ids_str].concat(); - let cmd = duct::cmd(&gpg, &args); - - let output = cmd.stderr_to_stdout().stderr_capture().unchecked().run()?; - - if !output.status.success() { - eprintln!("{}", String::from_utf8_lossy(&output.stdout)); - return Err(format!( - "Command: {:?}\nFailed to import GPG keys: {}", - cmd, - key_ids.join(" ") - ) - .into()); - } - } - } - Ok(()) -} - -fn make_cache_dir() -> Result> { - let base_dir = env::var("CARGO_MANIFEST_DIR") - .map(PathBuf::from) - .unwrap_or_else(|_| env::current_dir().expect("Failed to get current directory")); - // Choose `.cache` relative to the manifest directory (nginx-sys) as the default cache directory - // Environment variable `CACHE_DIR` overrides this - // Recommendation: set env "CACHE_DIR = { value = ".cache", relative = true }" in - // `.cargo/config.toml` in your project - let cache_dir = env::var("CACHE_DIR") - .map(PathBuf::from) - .unwrap_or(base_dir.join(".cache")); - if !cache_dir.exists() { - fs::create_dir_all(&cache_dir)?; - } - Ok(cache_dir) -} - -/// Downloads a tarball from the specified URL into the `.cache` directory. 
-fn download(cache_dir: &Path, url: &str) -> Result> { - fn proceed_with_download(file_path: &Path) -> bool { - // File does not exist or is zero bytes - !file_path.exists() || file_path.metadata().is_ok_and(|m| m.len() < 1) - } - let filename = url.split('/').next_back().unwrap(); - let file_path = cache_dir.join(filename); - if proceed_with_download(&file_path) { - println!("Downloading: {} -> {}", url, file_path.display()); - let mut response = ureq::get(url).call()?; - let mut reader = response.body_mut().as_reader(); - let mut file = File::create(&file_path)?; - std::io::copy(&mut reader, &mut file)?; - } - - if !file_path.exists() { - return Err( - format!("Downloaded file was not written to the expected location: {url}",).into(), - ); - } - Ok(file_path) -} - -/// Validates that a file is a valid GPG signature file. -fn verify_signature_file(cache_dir: &Path, signature_path: &Path) -> Result<(), Box> { - if !signature_path.exists() { - return Err(Box::new(std::io::Error::new( - NotFound, - format!("GPG signature file not found: {}", signature_path.display()), - ))); - } - if let Some(gpg) = gpg_path() { - let gnupghome = cache_dir.join(".gnupg"); - let cmd = cmd!( - gpg, - "--homedir", - &gnupghome, - "--list-packets", - signature_path - ); - let output = cmd.stderr_to_stdout().stdout_capture().unchecked().run()?; - - if !output.status.success() { - eprintln!("{}", String::from_utf8_lossy(&output.stdout)); - return Err(Box::new(std::io::Error::other(format!( - "Command: {:?} \nGPG signature file verification failed for signature: {}", - cmd, - signature_path.display() - )))); - } - } else { - println!("GPG not found, skipping signature file verification"); - } - Ok(()) -} - -/// Validates the integrity of a tarball file against the cryptographic signature associated with -/// the file. 
-fn verify_archive_signature( - cache_dir: &Path, - archive_path: &Path, - signature_path: &Path, -) -> Result<(), Box> { - if let Some(gpg) = gpg_path() { - let gnupghome = cache_dir.join(".gnupg"); - let cmd = cmd!( - gpg, - "--homedir", - &gnupghome, - "--verify", - signature_path, - archive_path - ); - let output = cmd.stderr_to_stdout().stdout_capture().unchecked().run()?; - if !output.status.success() { - eprintln!("{}", String::from_utf8_lossy(&output.stdout)); - return Err(Box::new(std::io::Error::other(format!( - "Command: {:?}\nGPG signature verification failed of archive failed [{}]", - cmd, - archive_path.display() - )))); - } - } else { - println!("GPG not found, skipping signature verification"); - } - Ok(()) -} - -/// Get a given tarball and signature file from a remote URL and copy it to the `.cache` directory. -fn get_archive( - cache_dir: &Path, - archive_url: &str, - signature_url: &str, -) -> Result> { - let signature_path = download(cache_dir, signature_url)?; - if let Err(e) = verify_signature_file(cache_dir, &signature_path) { - fs::remove_file(&signature_path)?; - return Err(e); - } - let archive_path = download(cache_dir, archive_url)?; - match verify_archive_signature(cache_dir, &archive_path, &signature_path) { - Ok(_) => Ok(archive_path), - Err(e) => { - fs::remove_file(&archive_path)?; - Err(e) - } - } -} - -/// Extract a tarball into a subdirectory based on the tarball's name under the source base -/// directory. 
-fn extract_archive( - archive_path: &Path, - extract_output_base_dir: &Path, -) -> Result<(String, PathBuf), Box> { - if !extract_output_base_dir.exists() { - fs::create_dir_all(extract_output_base_dir)?; - } - let archive_file = File::open(archive_path) - .unwrap_or_else(|_| panic!("Unable to open archive file: {}", archive_path.display())); - let stem = archive_path - .file_name() - .and_then(|s| s.to_str()) - .and_then(|s| s.rsplitn(3, '.').last()) - .expect("Unable to determine archive file name stem"); - let dependency_name = stem - .split_once('-') - .map(|(s, _)| s.to_owned()) - .unwrap_or_else(|| panic!("Unable to determine dependency name based on stem: {stem}")); - - let extract_output_dir = extract_output_base_dir.to_owned(); - let archive_output_dir = extract_output_dir.join(stem); - if !archive_output_dir.exists() { - Archive::new(GzDecoder::new(archive_file)) - .entries()? - .filter_map(|e| e.ok()) - .for_each(|mut entry| { - let path = entry.path().unwrap(); - let stripped_path = path.components().skip(1).collect::(); - entry - .unpack(archive_output_dir.join(stripped_path)) - .unwrap(); - }); - } else { - println!( - "Archive [{}] already extracted to directory: {}", - stem, - archive_output_dir.display() - ); - } - - Ok((dependency_name, archive_output_dir)) -} - -/// Extract all of the tarballs into subdirectories within the source base directory. 
-fn extract_all_archives( - cache_dir: &Path, - target: &str, -) -> Result, Box> { - let archives = all_archives(); - let mut sources = Vec::new(); - let extract_output_base_dir = source_output_dir(cache_dir, target); - if !extract_output_base_dir.exists() { - fs::create_dir_all(&extract_output_base_dir)?; - } - for (archive_url, signature_url) in archives { - let archive_path = get_archive(cache_dir, &archive_url, &signature_url)?; - let (name, output_dir) = extract_archive(&archive_path, &extract_output_base_dir)?; - sources.push((name, output_dir)); - } - - Ok(sources) -} - -/// Invoke external processes to run autoconf `configure` to generate a makefile for NGINX and -/// then run `make install`. -fn compile_nginx(cache_dir: &Path) -> Result<(PathBuf, PathBuf), Box> { - fn find_dependency_path<'a>( - sources: &'a [(String, PathBuf)], - name: &str, - ) -> Result<&'a PathBuf, String> { - sources - .iter() - .find(|(n, _)| n == name) - .map(|(_, p)| p) - .ok_or(format!("Unable to find dependency [{name}] path")) - } - let target = env::var("TARGET")?; - let nginx_install_base_dir = env::var("NGX_INSTALL_ROOT_DIR") - .map(PathBuf::from) - .unwrap_or(cache_dir.join("nginx")); - let nginx_install_dir = env::var("NGX_INSTALL_DIR") - .map(PathBuf::from) - .unwrap_or(nginx_install_dir(&nginx_install_base_dir, &target)); - let sources = extract_all_archives(cache_dir, &target)?; - let zlib_src_dir = find_dependency_path(&sources, "zlib")?; - let openssl_src_dir = find_dependency_path(&sources, "openssl")?; - let pcre2_src_dir = - find_dependency_path(&sources, "pcre2").or(find_dependency_path(&sources, "pcre"))?; - let nginx_src_dir = find_dependency_path(&sources, "nginx")?; - let nginx_configure_flags = nginx_configure_flags( - &nginx_install_dir, - zlib_src_dir, - openssl_src_dir, - pcre2_src_dir, - ); - let nginx_binary_exists = nginx_install_dir.join("sbin").join("nginx").exists(); - let autoconf_makefile_exists = nginx_src_dir.join("Makefile").exists(); - // We 
find out how NGINX was configured last time, so that we can compare it to what - // we are going to configure it to this time. If there are no changes, then we can assume - // that we do not need to reconfigure and rebuild NGINX. - let build_info_path = nginx_src_dir.join("last-build-info"); - let current_build_info = build_info(&nginx_configure_flags); - let build_info_no_change = if build_info_path.exists() { - read_to_string(&build_info_path).is_ok_and(|s| s == current_build_info) - } else { - false - }; - - println!("NGINX already installed: {nginx_binary_exists}"); - println!("NGINX autoconf makefile already created: {autoconf_makefile_exists}"); - println!("NGINX build info changed: {}", !build_info_no_change); - - if !nginx_binary_exists || !autoconf_makefile_exists || !build_info_no_change { - fs::create_dir_all(&nginx_install_dir)?; - configure(nginx_configure_flags, nginx_src_dir)?; - make(nginx_src_dir, "install")?; - let mut output = File::create(build_info_path)?; - // Store the configure flags of the last successful build - output.write_all(current_build_info.as_bytes())?; - } - Ok((nginx_install_dir, nginx_src_dir.to_owned())) -} - -/// Returns the options in which NGINX was built with -fn build_info(nginx_configure_flags: &[String]) -> String { - // Flags should contain strings pointing to OS/platform as well as dependency versions, - // so if any of that changes, it can trigger a rebuild - nginx_configure_flags.join(" ") -} - -/// Generate the flags to use with autoconf `configure` for NGINX based on the downloaded -/// dependencies' paths. Note: the paths differ based on cargo targets because they may be -/// configured differently for different os/platform targets. 
-fn nginx_configure_flags( - nginx_install_dir: &Path, - zlib_src_dir: &Path, - openssl_src_dir: &Path, - pcre2_src_dir: &Path, -) -> Vec { - fn format_source_path(flag: &str, path: &Path) -> String { - format!( - "{}={}", - flag, - path.as_os_str() - .to_str() - .expect("Unable to read source path as string") - ) - } - let modules = || -> Vec { - let mut modules = vec![ - format_source_path("--with-zlib", zlib_src_dir), - format_source_path("--with-pcre", pcre2_src_dir), - format_source_path("--with-openssl", openssl_src_dir), - ]; - for module in NGX_BASE_MODULES { - modules.push(module.to_string()); - } - modules - }; - let mut nginx_opts = vec![format_source_path("--prefix", nginx_install_dir)]; - if env::var("NGX_DEBUG").is_ok_and(|s| s == "true") { - println!("Enabling --with-debug"); - nginx_opts.push("--with-debug".to_string()); - } - if env::var("CARGO_CFG_TARGET_OS").map_or(env::consts::OS == "linux", |s| s == "linux") { - for flag in NGX_LINUX_ADDITIONAL_OPTS { - nginx_opts.push(flag.to_string()); - } - } - for flag in modules() { - nginx_opts.push(flag); - } - - nginx_opts -} - -/// Run external process invoking autoconf `configure` for NGINX. -fn configure(nginx_configure_flags: Vec, nginx_src_dir: &Path) -> std::io::Result { - let flags = nginx_configure_flags - .iter() - .map(OsString::from) - .collect::>(); - let configure_executable = nginx_src_dir.join("configure"); - if !configure_executable.exists() { - panic!( - "Unable to find NGINX configure script at: {}", - configure_executable.to_string_lossy() - ); - } - println!( - "Running NGINX configure script with flags: {:?}", - nginx_configure_flags.join(" ") - ); - duct::cmd(configure_executable, flags) - .dir(nginx_src_dir) - .stderr_to_stdout() - .run() -} - -/// Run `make` within the NGINX source directory as an external process. 
-fn make(nginx_src_dir: &Path, arg: &str) -> std::io::Result { - // Give preference to the binary with the name of gmake if it exists because this is typically - // the GNU 4+ on MacOS (if it is installed via homebrew). - let make_bin_path = match (which("gmake"), which("make")) { - (Ok(path), _) => Ok(path), - (_, Ok(path)) => Ok(path), - _ => Err(IoError::new( - NotFound, - "Unable to find make in path (gmake or make)", - )), - }?; - - // Level of concurrency to use when building nginx - cargo nicely provides this information - let num_jobs = match env::var("NUM_JOBS") { - Ok(s) => s.parse::().ok(), - Err(_) => thread::available_parallelism().ok().map(|n| n.get()), - } - .unwrap_or(1); - - /* Use the duct dependency here to merge the output of STDOUT and STDERR into a single stream, - and to provide the combined output as a reader which can be iterated over line-by-line. We - use duct to do this because it is a lot of work to implement this from scratch. */ - cmd!(make_bin_path, "-j", num_jobs.to_string(), arg) - .dir(nginx_src_dir) - .stderr_to_stdout() - .run() -} diff --git a/tests/build.rs b/tests/build.rs deleted file mode 100644 index 81caa36..0000000 --- a/tests/build.rs +++ /dev/null @@ -1,6 +0,0 @@ -fn main() { - println!( - "cargo:rustc-env=TARGET={}", - std::env::var("TARGET").unwrap() - ); -} diff --git a/tests/log_test.rs b/tests/log_test.rs index fc15b88..b1fe9f6 100644 --- a/tests/log_test.rs +++ b/tests/log_test.rs @@ -1,4 +1,6 @@ +use std::env; use std::fs; +use std::io; use std::io::Result; #[cfg(unix)] use std::os::unix::ffi::OsStrExt; @@ -6,7 +8,7 @@ use std::path::{Path, PathBuf}; use std::process::Command; use std::process::Output; -use ngx::ffi::{NGX_CONF_PATH, NGX_PREFIX, NGX_SBIN_PATH}; +const NGINX_BINARY_NAME: &str = "nginx"; /// Convert a CStr to a PathBuf pub fn cstr_to_path(val: &std::ffi::CStr) -> Option { @@ -22,42 +24,68 @@ pub fn cstr_to_path(val: &std::ffi::CStr) -> Option { Some(PathBuf::from(str)) } +/// Find nginx binary in 
the build directory +pub fn find_nginx_binary() -> io::Result { + let path = [ + // TEST_NGINX_BINARY is specified for tests + env::var("TEST_NGINX_BINARY").ok().map(PathBuf::from), + // The module is built against an external NGINX source tree + env::var("NGINX_BUILD_DIR") + .map(PathBuf::from) + .map(|x| x.join(NGINX_BINARY_NAME)) + .ok(), + env::var("NGINX_SOURCE_DIR") + .map(PathBuf::from) + .map(|x| x.join("objs").join(NGINX_BINARY_NAME)) + .ok(), + // Fallback to the build directory exposed by nginx-sys + option_env!("DEP_NGINX_BUILD_DIR") + .map(PathBuf::from) + .map(|x| x.join(NGINX_BINARY_NAME)), + ] + .into_iter() + .flatten() + .find(|x| x.is_file()) + .ok_or(io::ErrorKind::NotFound)?; + + Ok(path) +} + /// harness to test nginx pub struct Nginx { - pub install_path: PathBuf, + pub prefix: tempfile::TempDir, + pub bin_path: PathBuf, pub config_path: PathBuf, } impl Default for Nginx { /// create nginx with default fn default() -> Nginx { - let install_path = cstr_to_path(NGX_PREFIX).expect("installation prefix"); - Nginx::new(install_path) + let binary = find_nginx_binary().expect("nginx binary"); + Nginx::new(binary).expect("test harness") } } impl Nginx { - pub fn new>(path: P) -> Nginx { - let install_path = path.as_ref(); - let config_path = cstr_to_path(NGX_CONF_PATH).expect("configuration path"); - let config_path = install_path.join(config_path); - - Nginx { - install_path: install_path.into(), - config_path, - } - } + pub fn new(binary: impl AsRef) -> io::Result { + let prefix = tempfile::tempdir()?; + let config = prefix.path().join("nginx.conf"); + + fs::create_dir(prefix.path().join("logs"))?; - /// get bin path to nginx instance - pub fn bin_path(&mut self) -> PathBuf { - let bin_path = cstr_to_path(NGX_SBIN_PATH).expect("binary path"); - self.install_path.join(bin_path) + Ok(Nginx { + prefix, + bin_path: binary.as_ref().to_owned(), + config_path: config, + }) } /// start nginx process with arguments - pub fn cmd(&mut self, args: &[&str]) -> 
Result { - let bin_path = self.bin_path(); - let result = Command::new(bin_path).args(args).output(); + pub fn cmd(&self, args: &[&str]) -> Result { + let prefix = self.prefix.path().to_string_lossy(); + let config_path = self.config_path.to_string_lossy(); + let args = [&["-p", &prefix, "-c", &config_path], args].concat(); + let result = Command::new(&self.bin_path).args(args).output(); match result { Err(e) => Err(e), diff --git a/tests/nginx.conf b/tests/nginx.conf index 495fc07..be3e9c9 100644 --- a/tests/nginx.conf +++ b/tests/nginx.conf @@ -16,7 +16,6 @@ events { http { - include mime.types; default_type application/octet-stream;