diff --git a/src/api/mod.rs b/src/api/mod.rs index a400e6981f..146636c673 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -13,7 +13,7 @@ mod pagination; use std::borrow::Cow; use std::cell::RefCell; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::ffi::OsStr; use std::fs::File; use std::io::{self, Read as _, Write}; @@ -90,7+90,6 @@ pub struct AuthenticatedApi<'a> { pub struct RegionSpecificApi<'a> { api: &'a AuthenticatedApi<'a>, - org: &'a str, region_url: Option<Cow<'a, str>>, } @@ -863,34 +862,6 @@ impl<'a> AuthenticatedApi<'a> { .map(|_| true) } - /// Given a list of checksums for DIFs, this returns a list of those - /// that do not exist for the project yet. - pub fn find_missing_dif_checksums<I>( - &self, - org: &str, - project: &str, - checksums: I, - ) -> ApiResult<HashSet<Digest>> - where - I: IntoIterator<Item = Digest>, - { - let mut url = format!( - "/projects/{}/{}/files/dsyms/unknown/?", - PathArg(org), - PathArg(project) - ); - for (idx, checksum) in checksums.into_iter().enumerate() { - if idx > 0 { - url.push('&'); - } - url.push_str("checksums="); - url.push_str(&checksum.to_string()); - } - - let state: MissingChecksumsResponse = self.get(&url)?.convert()?; - Ok(state.missing) - } - /// Get the server configuration for chunked file uploads. pub fn get_chunk_upload_options(&self, org: &str) -> ApiResult<Option<ChunkServerOptions>> { let url = format!("/organizations/{}/chunk-upload/", PathArg(org)); @@ -1252,7 +1223,6 @@ impl<'a> AuthenticatedApi<'a> { // Do not specify a region URL unless the URL is configured to https://sentry.io (i.e. the default). return RegionSpecificApi { api: self, - org, region_url: None, }; } @@ -1279,7 +1249,6 @@ impl<'a> AuthenticatedApi<'a> { RegionSpecificApi { api: self, - org, region_url, } } @@ -1363,22 +1332,6 @@ impl RegionSpecificApi<'_> { .request(method, url, self.region_url.as_deref()) } - /// Uploads a ZIP archive containing DIFs from the given path. 
- pub fn upload_dif_archive(&self, project: &str, file: &Path) -> ApiResult<Vec<DebugInfoFile>> { - let path = format!( - "/projects/{}/{}/files/dsyms/", - PathArg(self.org), - PathArg(project) - ); - let mut form = curl::easy::Form::new(); - form.part("file").file(file).add()?; - self.request(Method::Post, &path)? - .with_form_data(form)? - .progress_bar_mode(ProgressBarMode::Request) - .send()? - .convert() - } - /// Uploads a new release file. The file is loaded directly from the file /// system and uploaded as `name`. pub fn upload_release_file( @@ -2096,11 +2049,6 @@ impl DebugInfoFile { } } -#[derive(Deserialize)] -struct MissingChecksumsResponse { - missing: HashSet<Digest>, -} - #[derive(Clone, Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Issue { diff --git a/src/config.rs b/src/config.rs index 18683f164d..4fca141587 100644 --- a/src/config.rs +++ b/src/config.rs @@ -19,7 +19,6 @@ use sentry::types::Dsn; use crate::constants::CONFIG_INI_FILE_PATH; use crate::constants::DEFAULT_MAX_DIF_ITEM_SIZE; -use crate::constants::DEFAULT_MAX_DIF_UPLOAD_SIZE; use crate::constants::{CONFIG_RC_FILE_NAME, DEFAULT_RETRIES, DEFAULT_URL}; use crate::utils::args; use crate::utils::auth_token::AuthToken; @@ -457,17 +456,6 @@ impl Config { ) } - /// Returns the maximum DIF upload size - pub fn get_max_dif_archive_size(&self) -> u64 { - let key = "max_upload_size"; - - self.ini - .get_from(Some("dif"), key) - .or_else(|| self.ini.get_from(Some("dsym"), key)) - .and_then(|x| x.parse().ok()) - .unwrap_or(DEFAULT_MAX_DIF_UPLOAD_SIZE) - } - /// Returns the maximum file size of a single file inside DIF bundle pub fn get_max_dif_item_size(&self) -> u64 { let key = "max_item_size"; diff --git a/src/constants.rs b/src/constants.rs index ac3770e201..9d8f52ada9 100644 --- a/src/constants.rs +++ b/src/constants.rs @@ -42,7 +42,6 @@ pub const DEFAULT_MAX_DIF_SIZE: u64 = 2 * 1024 * 1024 * 1024; // 2GB /// Default maximum file size of a single file inside DIF bundle. 
pub const DEFAULT_MAX_DIF_ITEM_SIZE: u64 = 1024 * 1024; // 1MB /// Default maximum DIF upload size. -pub const DEFAULT_MAX_DIF_UPLOAD_SIZE: u64 = 35 * 1024 * 1024; // 35MB /// Default maximum time to wait for file assembly. pub const DEFAULT_MAX_WAIT: Duration = Duration::from_secs(5 * 60); /// Maximum length for commit SHA values, enforced in backend. diff --git a/src/utils/dif_upload/mod.rs b/src/utils/dif_upload/mod.rs index bb4ccf3a32..6271a17ce0 100644 --- a/src/utils/dif_upload/mod.rs +++ b/src/utils/dif_upload/mod.rs @@ -14,7 +14,6 @@ use std::fs::{self, File}; use std::io::{BufReader, BufWriter, Read as _, Seek as _, Write as _}; use std::iter::IntoIterator; use std::mem::transmute; -use std::ops::Deref; use std::path::{Component, Path, PathBuf}; use std::process::Command; use std::str; @@ -23,7 +22,6 @@ use std::time::Duration; use anyhow::{bail, format_err, Error, Result}; use console::style; use log::{debug, info, warn}; -use sha1_smol::Digest; use symbolic::common::{Arch, AsSelf, ByteView, DebugId, SelfCell, Uuid}; use symbolic::debuginfo::macho::{BcSymbolMap, UuidMapping}; use symbolic::debuginfo::pe::PeObject; @@ -33,26 +31,21 @@ use symbolic::il2cpp::ObjectLineMapping; use walkdir::WalkDir; use which::which; use zip::result::ZipError; -use zip::write::SimpleFileOptions; -use zip::{ZipArchive, ZipWriter}; +use zip::ZipArchive; use self::error::ValidationError; use crate::api::{Api, ChunkServerOptions, ChunkUploadCapability}; use crate::config::Config; use crate::constants::{DEFAULT_MAX_DIF_SIZE, DEFAULT_MAX_WAIT}; use crate::utils::chunks; -use crate::utils::chunks::{Assemblable, BatchedSliceExt as _, ChunkOptions, Chunked, ItemSize}; +use crate::utils::chunks::{Assemblable, ChunkOptions, Chunked}; use crate::utils::dif::ObjectDifFeatures; -use crate::utils::fs::{get_sha1_checksum, TempDir, TempFile}; +use crate::utils::fs::{TempDir, TempFile}; use crate::utils::progress::{ProgressBar, ProgressStyle}; -use crate::utils::ui::{copy_with_progress, 
make_byte_progress_bar}; /// A debug info file on the server. pub use crate::api::DebugInfoFile; -/// Fallback maximum number of chunks in a batch for the legacy upload. -static MAX_CHUNKS: u64 = 64; - /// A Debug Information File. /// /// This is primarily used to store inside the [`DifMatch`] so does not contain any @@ -212,11 +205,6 @@ impl<'data> DifMatch<'data> { } } - /// Returns the size of of this DIF in bytes. - pub fn size(&self) -> u64 { - self.data().len() as u64 - } - /// Returns the path of this DIF relative to the search origin. pub fn path(&self) -> &str { &self.name @@ -315,40 +303,6 @@ impl Assemblable for DifMatch<'_> { } } -/// A `DifMatch` with computed SHA1 checksum. -#[derive(Debug)] -struct HashedDifMatch<'data> { - inner: DifMatch<'data>, - checksum: Digest, -} - -impl<'data> HashedDifMatch<'data> { - /// Calculates the SHA1 checksum for the given DIF. - fn from(inner: DifMatch<'data>) -> Result<Self> { - let checksum = get_sha1_checksum(inner.data())?; - Ok(HashedDifMatch { inner, checksum }) - } - - /// Returns the SHA1 checksum of this DIF. - fn checksum(&self) -> Digest { - self.checksum - } -} - -impl<'data> Deref for HashedDifMatch<'data> { - type Target = DifMatch<'data>; - - fn deref(&self) -> &Self::Target { - &self.inner - } -} - -impl ItemSize for HashedDifMatch<'_> { - fn size(&self) -> u64 { - self.deref().size() - } -} - type ZipFileArchive = ZipArchive<BufReader<File>>; /// A handle to the source of a potential `DifMatch` used inside `search_difs`. @@ -1250,130 +1204,6 @@ fn upload_difs_chunked( chunks::upload_chunked_objects(&chunked, options) } -/// Returns debug files missing on the server. -fn get_missing_difs<'data>( - objects: Vec<HashedDifMatch<'data>>, - options: &DifUpload, -) -> Result<Vec<HashedDifMatch<'data>>> { - info!( - "Checking for missing debug information files: {:#?}", - &objects - ); - - let api = Api::current(); - let missing_checksums = { - let checksums = objects.iter().map(HashedDifMatch::checksum); - api.authenticated()? 
- .find_missing_dif_checksums(options.org, options.project, checksums)? - }; - - let missing = objects - .into_iter() - .filter(|sym| missing_checksums.contains(&sym.checksum())) - .collect(); - - info!("Missing debug information files: {:#?}", &missing); - Ok(missing) -} - -/// Compresses the given batch into a ZIP archive. -fn create_batch_archive(difs: &[HashedDifMatch<'_>]) -> Result<TempFile> { - let total_bytes = difs.iter().map(ItemSize::size).sum(); - let pb = make_byte_progress_bar(total_bytes); - let tf = TempFile::create()?; - - { - let mut zip = ZipWriter::new(tf.open()?); - - for symbol in difs { - zip.start_file(symbol.file_name(), SimpleFileOptions::default())?; - copy_with_progress(&pb, &mut symbol.data(), &mut zip)?; - } - } - - pb.finish_and_clear(); - Ok(tf) -} - -/// Uploads the given DIFs to the server in batched ZIP archives. -fn upload_in_batches( - objects: &[HashedDifMatch<'_>], - options: &DifUpload, -) -> Result<Vec<DebugInfoFile>> { - let api = Api::current(); - let max_size = Config::current().get_max_dif_archive_size(); - let mut dsyms = Vec::new(); - - for (i, (batch, _)) in objects.batches(max_size, MAX_CHUNKS).enumerate() { - println!("\n{}", style(format!("Batch {}", i + 1)).bold()); - - println!( - "{} Compressing {} debug symbol files", - style(">").dim(), - style(batch.len()).yellow() - ); - let archive = create_batch_archive(batch)?; - - println!("{} Uploading debug symbol files", style(">").dim()); - dsyms.extend( - api.authenticated()? - .region_specific(options.org) - .upload_dif_archive(options.project, archive.path())?, - ); - } - - Ok(dsyms) -} - -/// Uploads debug info files using the legacy endpoint. 
-#[deprecated = "this non-chunked upload mechanism is deprecated in favor of upload_difs_chunked"] -fn upload_difs_batched(options: &DifUpload) -> Result<Vec<DebugInfoFile>> { - // Search for debug files in the file system and ZIPs - let found = search_difs(options)?; - if found.is_empty() { - println!("{} No debug information files found", style(">").dim()); - return Ok(Default::default()); - } - - // Try to resolve BCSymbolMaps - let symbol_map = options.symbol_map.as_deref(); - let processed = process_symbol_maps(found, symbol_map)?; - - // Calculate checksums - let hashed = prepare_difs(processed, HashedDifMatch::from)?; - - // Check which files are missing on the server - let missing = get_missing_difs(hashed, options)?; - if missing.is_empty() { - println!( - "{} Nothing to upload, all files are on the server", - style(">").dim() - ); - println!("{} Nothing to upload", style(">").dim()); - return Ok(Default::default()); - } - if options.no_upload { - println!("{} skipping upload.", style(">").dim()); - return Ok(Default::default()); - } - - // Upload missing DIFs in batches - let uploaded = upload_in_batches(&missing, options)?; - if !uploaded.is_empty() { - println!("{} File upload complete:\n", style(">").dim()); - for dif in &uploaded { - println!( - " {} ({}; {})", - style(&dif.id()).dim(), - &dif.object_name, - dif.cpu_name - ); - } - } - - Ok(uploaded) -} - /// The format of a Debug Information File (DIF). /// /// Most DIFs are also object files, but we also know of some auxiliary DIF formats. 
@@ -1641,23 +1471,16 @@ impl<'a> DifUpload<'a> { self.bcsymbolmaps_allowed = chunk_options.supports(ChunkUploadCapability::BcSymbolmap); self.il2cpp_mappings_allowed = chunk_options.supports(ChunkUploadCapability::Il2Cpp); - if chunk_options.supports(ChunkUploadCapability::DebugFiles) { - self.validate_capabilities(); - return upload_difs_chunked(self, chunk_options); + if !chunk_options.supports(ChunkUploadCapability::DebugFiles) { + anyhow::bail!( + "Your Sentry server does not support chunked uploads for debug files. Please upgrade \ + your Sentry server, or if you cannot upgrade your server, downgrade your Sentry \ + CLI version to 2.x." + ); } self.validate_capabilities(); - - log::warn!( - "[DEPRECATION NOTICE] Your Sentry server does not support chunked uploads for debug \ - files. Falling back to deprecated upload method. Support for this deprecated upload \ - method will be removed in Sentry CLI 3.0.0. Please upgrade your Sentry server, or if \ - you cannot upgrade, pin your Sentry CLI version to 2.x, so you don't get upgraded \ - to 3.x when it is released." - ); - - #[expect(deprecated, reason = "fallback to legacy upload")] - Ok((upload_difs_batched(&self)?, false)) + upload_difs_chunked(self, chunk_options) } /// Validate that the server supports all requested capabilities. diff --git a/src/utils/fs.rs b/src/utils/fs.rs index 3849afa75e..d3181614ed 100644 --- a/src/utils/fs.rs +++ b/src/utils/fs.rs @@ -143,21 +143,6 @@ pub fn set_executable_mode<P: AsRef<Path>>(path: P) -> Result<()> { Ok(()) } -/// Returns the SHA1 hash of the given input. -pub fn get_sha1_checksum<R: Read>(rdr: R) -> Result<Digest> { - let mut sha = Sha1::new(); - let mut buf = [0u8; 16384]; - let mut rdr = io::BufReader::new(rdr); - loop { - let read = rdr.read(&mut buf)?; - if read == 0 { - break; - } - sha.update(&buf[..read]); - } - Ok(sha.digest()) -} - /// Returns the SHA1 hash for the entire input, as well as each chunk of it. The /// `chunk_size` must be non-zero. 
pub fn get_sha1_checksums(data: &[u8], chunk_size: NonZeroUsize) -> (Digest, Vec<Digest>) { diff --git a/src/utils/ui.rs b/src/utils/ui.rs index 14ba86939a..eaf8c079c5 100644 --- a/src/utils/ui.rs +++ b/src/utils/ui.rs @@ -1,5 +1,5 @@ use std::io; -use std::io::{Read, Write}; +use std::io::Write as _; use crate::utils::progress::{ProgressBar, ProgressStyle}; @@ -45,27 +45,6 @@ pub fn capitalize_string(s: &str) -> String { String::from_utf8(bytes).unwrap() } -/// Like ``io::copy`` but advances a progress bar set to bytes. -pub fn copy_with_progress<R, W>(pb: &ProgressBar, reader: &mut R, writer: &mut W) -> io::Result<u64> -where - R: Read + ?Sized, - W: Write + ?Sized, -{ - let mut buf = [0; 16384]; - let mut written = 0; - loop { - let len = match reader.read(&mut buf) { - Ok(0) => return Ok(written), - Ok(len) => len, - Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, - Err(e) => return Err(e), - }; - writer.write_all(&buf[..len])?; - written += len as u64; - pb.inc(len as u64); - } -} - /// Creates a progress bar for byte stuff pub fn make_byte_progress_bar(length: u64) -> ProgressBar { let pb = ProgressBar::new(length as usize);