diff --git a/configs/experiment/sample_uncapped_2AA_diffusion.yaml b/configs/experiment/sample_uncapped_2AA_diffusion.yaml new file mode 100644 index 0000000..da4fe7c --- /dev/null +++ b/configs/experiment/sample_uncapped_2AA_diffusion.yaml @@ -0,0 +1,42 @@ +# @package _global_ + +defaults: + - override /sampler: "diffusion.yaml" + - override /callbacks: + - sampler/save_sample.yaml + - _self_ + +callbacks: + sampler: + save_sample: + save_trajectory: false + +init_datasets: + _target_: jamun.data.parse_datasets_from_directory + root: "${paths.data_path}/timewarp/2AA-1-large/test/" + traj_pattern: "^(.*)-traj-arrays.npz" + pdb_pattern: "^(.*)-traj-state0.pdb" + subsample: 1 + num_workers: 16 + +sampler: + sigma_min: 1e-4 + sigma_max: 1.0 + rho: 7 + num_steps: 64 + use_second_order_correction: true + +finetune_on_init: false + +num_batches: 2048 +repeat_init_samples: 16 +num_init_samples_per_dataset: 1 +continue_chain: false + + +wandb_train_run_path: ??? +checkpoint_type: "best_so_far" + +logger: + wandb: + group: sample_uncapped_2AA diff --git a/configs/experiment/train_uncapped_2AA_diffusion.yaml b/configs/experiment/train_uncapped_2AA_diffusion.yaml new file mode 100644 index 0000000..2444702 --- /dev/null +++ b/configs/experiment/train_uncapped_2AA_diffusion.yaml @@ -0,0 +1,68 @@ +# @package _global_ + +defaults: + - override /callbacks: + - visualize_denoise.yaml + - timing.yaml + - ema.yaml + - ema_model_checkpoint.yaml + - _self_ + +compute_average_squared_distance_from_data: true + +model: + sigma_distribution: + _target_: jamun.distributions.ClippedLogNormalSigma + log_sigma_mean: -3.2188758248682006 # log(0.04) + log_sigma_std: 1.0 + max_radius: 1.0 + optim: + lr: 2e-3 + arch: + irreps_hidden: "120x0e + 32x1e" + hidden_layer_factory: + _target_: "e3tools.nn.SeparableConvBlock" + + +callbacks: + viz: + sigma_list: [0.04] + +data: + datamodule: + batch_size: 32 + datasets: + train: + _target_: jamun.data.parse_datasets_from_directory + root: 
"${paths.data_path}/timewarp/2AA-1-large/train/" + traj_pattern: "^(.*)-traj-arrays.npz" + pdb_pattern: "^(.*)-traj-state0.pdb" + num_workers: 16 + + val: + _target_: jamun.data.parse_datasets_from_directory + root: "${paths.data_path}/timewarp/2AA-1-large/val/" + traj_pattern: "^(.*)-traj-arrays.npz" + pdb_pattern: "^(.*)-traj-state0.pdb" + subsample: 100 + max_datasets: 20 + num_workers: 16 + + test: + _target_: jamun.data.parse_datasets_from_directory + root: "${paths.data_path}/timewarp/2AA-1-large/test/" + traj_pattern: "^(.*)-traj-arrays.npz" + pdb_pattern: "^(.*)-traj-state0.pdb" + subsample: 100 + max_datasets: 20 + num_workers: 16 + +trainer: + val_check_interval: 50000 + check_val_every_n_epoch: null + max_epochs: 10 + num_sanity_val_steps: 0 + +logger: + wandb: + group: train_uncapped_2AA diff --git a/profiling/profile.sh b/profiling/profile.sh index 47d8c4e..a084d09 100755 --- a/profiling/profile.sh +++ b/profiling/profile.sh @@ -13,6 +13,7 @@ nsys profile \ -t cuda,nvtx,osrt,cudnn,cublas \ -s cpu \ -x true \ + --pytorch=autograd-nvtx \ -o nsys.profile \ --force-overwrite true \ --capture-range=cudaProfilerApi \ diff --git a/pyproject.toml b/pyproject.toml index dc388ab..ad92294 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,13 @@ dependencies = [ jamun_train = "jamun.cmdline.train:main" jamun_sample = "jamun.cmdline.sample:main" +[project.optional-dependencies] +analysis = [ + "polars>=1.32.0", + "pyarrow>=21.0.0", + "seaborn>=0.13.2", +] + [build-system] requires = ["hatchling", "hatch-vcs"] build-backend = "hatchling.build" diff --git a/src/jamun/callbacks/sampler/_chemical_validity.py b/src/jamun/callbacks/sampler/_chemical_validity.py index dcc53ca..4ac9a27 100644 --- a/src/jamun/callbacks/sampler/_chemical_validity.py +++ b/src/jamun/callbacks/sampler/_chemical_validity.py @@ -19,7 +19,7 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: ChemicalValidityMetrics(*args, dataset=dataset, **kwargs), ) - py_logger = 
logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info( f"Initialized ChemicalValidityMetricsCallback with datasets of labels: {list(self.meters.keys())}." ) diff --git a/src/jamun/callbacks/sampler/_measure_sampling_time.py b/src/jamun/callbacks/sampler/_measure_sampling_time.py index 4d71f70..a56342d 100644 --- a/src/jamun/callbacks/sampler/_measure_sampling_time.py +++ b/src/jamun/callbacks/sampler/_measure_sampling_time.py @@ -73,7 +73,7 @@ def on_after_sample_batch(self, sample, fabric, batch_idx): fabric.log("sampler/avg_time_per_graph", self.total_sampling_time / self.total_num_graphs, step=batch_idx) # Log to console - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info( f"Sampled batch {batch_idx} with {num_graphs} samples in {time_elapsed:.4f} seconds " f"({time_elapsed / num_graphs:.4f} seconds per sample)." @@ -102,7 +102,7 @@ def on_sample_end(self, fabric): fabric.log("sampler/std_batch_time", torch.std(torch.tensor(self.batch_times)).item()) # Log to console. 
- py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info( f"Total sampling time: {self.total_sampling_time:.4f} seconds " f"for {self.total_num_graphs} samples " diff --git a/src/jamun/callbacks/sampler/_posebusters.py b/src/jamun/callbacks/sampler/_posebusters.py index 9b0f7ae..b031c81 100644 --- a/src/jamun/callbacks/sampler/_posebusters.py +++ b/src/jamun/callbacks/sampler/_posebusters.py @@ -19,5 +19,5 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: PoseBustersMetrics(*args, dataset=dataset, **kwargs), ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"Initialized PoseBustersCallback with datasets of labels: {list(self.meters.keys())}.") diff --git a/src/jamun/callbacks/sampler/_ramachandran.py b/src/jamun/callbacks/sampler/_ramachandran.py index 6bf2819..aa7f837 100644 --- a/src/jamun/callbacks/sampler/_ramachandran.py +++ b/src/jamun/callbacks/sampler/_ramachandran.py @@ -17,7 +17,7 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: RamachandranPlotMetrics(dataset=dataset), ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info( f"Initialized RamachandranPlotMetricsCallback with datasets of labels: {list(self.meters.keys())}." 
) diff --git a/src/jamun/callbacks/sampler/_save_trajectory.py b/src/jamun/callbacks/sampler/_save_trajectory.py index 339370d..e5b677b 100644 --- a/src/jamun/callbacks/sampler/_save_trajectory.py +++ b/src/jamun/callbacks/sampler/_save_trajectory.py @@ -19,5 +19,5 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: SaveTrajectory(*args, dataset=dataset, **kwargs), ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"Initialized SaveTrajectoryCallback with datasets of labels: {list(self.meters.keys())}.") diff --git a/src/jamun/callbacks/sampler/_score_distribution.py b/src/jamun/callbacks/sampler/_score_distribution.py index 9963489..638192f 100644 --- a/src/jamun/callbacks/sampler/_score_distribution.py +++ b/src/jamun/callbacks/sampler/_score_distribution.py @@ -19,5 +19,5 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: ScoreDistributionMetrics(*args, dataset=dataset, **kwargs), ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"Initialized ScoreDistributionCallback with datasets of labels: {list(self.meters.keys())}.") diff --git a/src/jamun/callbacks/sampler/_trajectory_animation.py b/src/jamun/callbacks/sampler/_trajectory_animation.py index 1b99725..72c28b6 100644 --- a/src/jamun/callbacks/sampler/_trajectory_animation.py +++ b/src/jamun/callbacks/sampler/_trajectory_animation.py @@ -19,5 +19,5 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: TrajectoryVisualizer(*args, dataset=dataset, **kwargs), ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"Initialized TrajectoryVisualizerCallback with datasets of labels: {list(self.meters.keys())}.") diff --git a/src/jamun/callbacks/sampler/_visualize_samples.py b/src/jamun/callbacks/sampler/_visualize_samples.py index 9352bad..97490db 100644 --- a/src/jamun/callbacks/sampler/_visualize_samples.py +++ 
b/src/jamun/callbacks/sampler/_visualize_samples.py @@ -19,5 +19,5 @@ def __init__( datasets=datasets, metric_fn=lambda dataset: SampleVisualizer(*args, dataset=dataset, **kwargs), ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"Initialized SampleVisualizerCallback with datasets of labels: {list(self.meters.keys())}.") diff --git a/src/jamun/cmdline/sample.py b/src/jamun/cmdline/sample.py index 62f9289..bd97000 100644 --- a/src/jamun/cmdline/sample.py +++ b/src/jamun/cmdline/sample.py @@ -1,3 +1,4 @@ +import logging import os import sys import traceback @@ -25,11 +26,13 @@ dotenv.load_dotenv(".env", verbose=True) OmegaConf.register_new_resolver("format", format_resolver) +py_logger = logging.getLogger("jamun") + def sample_loop( fabric, model, - batch_sampler, + sampler, num_batches: int, init_graphs: torch_geometric.data.Data, continue_chain: bool = False, @@ -38,7 +41,7 @@ def sample_loop( model_wrapped = jamun.utils.ModelSamplingWrapper( model=model, init_graphs=init_graphs, - sigma=batch_sampler.sigma, + sigma=sampler.sigma, ) y_init = model_wrapped.sample_initial_noisy_positions() @@ -57,7 +60,7 @@ def sample_loop( for batch_idx in iterable: fabric.call("on_before_sample_batch", fabric=fabric, batch_idx=batch_idx) - out = batch_sampler.sample(model=model_wrapped, y_init=y_init, v_init=v_init) + out = sampler.sample(model=model_wrapped, y_init=y_init, v_init=v_init) samples = model_wrapped.unbatch_samples(out) # Start next chain from the end state of the previous chain? 
@@ -90,6 +93,27 @@ def get_initial_graphs( def run(cfg): log_cfg = OmegaConf.to_container(cfg, throw_on_missing=True, resolve=True) + rank_zero_logging_level = cfg.get("rank_zero_logging_level", "INFO") + non_rank_zero_logging_level = cfg.get("non_rank_zero_logging_level", "ERROR") + + if rank_zero_only.rank == 0: + level = rank_zero_logging_level + else: + level = non_rank_zero_logging_level + + py_logger.setLevel(level) + + loggers = instantiate_dict_cfg(cfg.get("logger"), verbose=(rank_zero_only.rank == 0)) + wandb_logger = None + for logger in loggers: + if isinstance(logger, pl.loggers.WandbLogger): + wandb_logger = logger + + callbacks = instantiate_dict_cfg(cfg.get("callbacks"), verbose=(rank_zero_only.rank == 0)) + fabric = hydra.utils.instantiate(cfg.fabric, callbacks=callbacks, loggers=loggers) + + fabric.launch() + dist_log(f"{OmegaConf.to_yaml(log_cfg)}") dist_log(f"{os.getcwd()=}") dist_log(f"{torch.__config__.parallel_info()}") @@ -100,14 +124,8 @@ def run(cfg): dist_log(f"Setting float_32_matmul_precision to {matmul_prec}") torch.set_float32_matmul_precision(matmul_prec) - loggers = instantiate_dict_cfg(cfg.get("logger"), verbose=(rank_zero_only.rank == 0)) - wandb_logger = None - for logger in loggers: - if isinstance(logger, pl.loggers.WandbLogger): - wandb_logger = logger - if rank_zero_only.rank == 0 and wandb_logger: - dist_log(f"{wandb_logger.experiment.name=}") + py_logger.info(f"{wandb_logger.experiment.name=}") wandb_logger.experiment.config.update({"cfg": log_cfg, "version": jamun.__version__, "cwd": os.getcwd()}) # Load the checkpoint either given the wandb run path or the checkpoint path. 
@@ -128,14 +146,10 @@ def run(cfg): repeat=cfg.repeat_init_samples, ) - callbacks = instantiate_dict_cfg(cfg.get("callbacks"), verbose=(rank_zero_only.rank == 0)) - fabric = hydra.utils.instantiate(cfg.fabric, callbacks=callbacks, loggers=loggers) - - fabric.launch() fabric.setup(model) model.eval() - batch_sampler = hydra.utils.instantiate(cfg.batch_sampler) + sampler = hydra.utils.instantiate(cfg.sampler) if seed := cfg.get("seed"): # During sampling, we want ranks to generate different chains. @@ -172,7 +186,7 @@ def run(cfg): sample_loop( fabric=fabric, model=model, - batch_sampler=batch_sampler, + sampler=sampler, init_graphs=init_graphs, num_batches=cfg.num_batches, continue_chain=cfg.continue_chain, diff --git a/src/jamun/cmdline/train.py b/src/jamun/cmdline/train.py index 5ec8883..b5de96e 100644 --- a/src/jamun/cmdline/train.py +++ b/src/jamun/cmdline/train.py @@ -1,3 +1,4 @@ +import logging import os import pathlib import sys @@ -22,6 +23,8 @@ dotenv.load_dotenv(".env", verbose=True) OmegaConf.register_new_resolver("format", format_resolver) +py_logger = logging.getLogger("jamun") + def compute_average_squared_distance_from_config(cfg: OmegaConf) -> float: """Computes the average squared distance for normalization from the data.""" @@ -36,6 +39,16 @@ def compute_average_squared_distance_from_config(cfg: OmegaConf) -> float: def run(cfg): log_cfg = OmegaConf.to_container(cfg, throw_on_missing=True, resolve=True) + rank_zero_logging_level = cfg.get("rank_zero_logging_level", "INFO") + non_rank_zero_logging_level = cfg.get("non_rank_zero_logging_level", "ERROR") + + if rank_zero_only.rank == 0: + level = rank_zero_logging_level + else: + level = non_rank_zero_logging_level + + py_logger.setLevel(level) + dist_log(f"{OmegaConf.to_yaml(log_cfg)}") dist_log(f"{os.getcwd()=}") dist_log(f"{torch.__config__.parallel_info()}") diff --git a/src/jamun/data/_mdtraj.py b/src/jamun/data/_mdtraj.py index 
e0bbdf3..e14169c 100644 --- a/src/jamun/data/_mdtraj.py +++ b/src/jamun/data/_mdtraj.py @@ -104,7 +104,7 @@ def __init__( self.original_topology.atom(atom_indices[i]), self.original_topology.atom(atom_indices[i + 1]) ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.warning( f"Dataset {self.label()}: No bonds found in topology. Assuming a coarse-grained model and creating bonds between consecutive residues." ) @@ -250,7 +250,7 @@ def __init__( self.traj.topology.atom(atom_indices[i]), self.traj.topology.atom(atom_indices[i + 1]) ) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.warning( f"Dataset {self.label()}: No bonds found in topology. Assuming a coarse-grained model and creating bonds between consecutive residues." ) diff --git a/src/jamun/data/_utils.py b/src/jamun/data/_utils.py index 0300bad..b82535a 100644 --- a/src/jamun/data/_utils.py +++ b/src/jamun/data/_utils.py @@ -46,6 +46,7 @@ def download_file(url: str, path: str, verbose: bool = False, block_size: int | pbar.update(len(data)) +# FIXME num_workers>0 breaks singleton caching of datasets def parse_datasets_from_directory( root: str, traj_pattern: str, diff --git a/src/jamun/hydra/utils.py b/src/jamun/hydra/utils.py index c0a7556..b23ae1f 100644 --- a/src/jamun/hydra/utils.py +++ b/src/jamun/hydra/utils.py @@ -18,7 +18,7 @@ def instantiate_dict_cfg(cfg: DictConfig | None, verbose: bool = False): raise TypeError("cfg must be a DictConfig") if verbose: - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) for k, v in cfg.items(): if isinstance(v, DictConfig): diff --git a/src/jamun/hydra_config/sample.yaml b/src/jamun/hydra_config/sample.yaml index a34ea11..51e9b98 100644 --- a/src/jamun/hydra_config/sample.yaml +++ b/src/jamun/hydra_config/sample.yaml @@ -1,7 +1,7 @@ defaults: - _self_ - model: denoiser_pretrained - - batch_sampler: walkjump.yaml + - sampler: walkjump.yaml - logger: 
default - paths: default - hydra: default diff --git a/src/jamun/hydra_config/batch_sampler/diffusion.yaml b/src/jamun/hydra_config/sampler/diffusion.yaml similarity index 82% rename from src/jamun/hydra_config/batch_sampler/diffusion.yaml rename to src/jamun/hydra_config/sampler/diffusion.yaml index df7f82b..4da480a 100644 --- a/src/jamun/hydra_config/batch_sampler/diffusion.yaml +++ b/src/jamun/hydra_config/sampler/diffusion.yaml @@ -6,11 +6,12 @@ _target_: jamun.sampling.DiffusionSampler sigma_min: 0.01 sigma_max: 1.0 rho: 7.0 -N: 64 +num_steps: 64 y_init_distribution: null verbose: true S_churn: 0.0 S_min: 0.0 S_max: 100.0 S_noise: 1.0 +use_second_order_correction: true save_trajectory: true diff --git a/src/jamun/hydra_config/batch_sampler/mcmc/aboba.yaml b/src/jamun/hydra_config/sampler/mcmc/aboba.yaml similarity index 100% rename from src/jamun/hydra_config/batch_sampler/mcmc/aboba.yaml rename to src/jamun/hydra_config/sampler/mcmc/aboba.yaml diff --git a/src/jamun/hydra_config/batch_sampler/mcmc/baoab.yaml b/src/jamun/hydra_config/sampler/mcmc/baoab.yaml similarity index 100% rename from src/jamun/hydra_config/batch_sampler/mcmc/baoab.yaml rename to src/jamun/hydra_config/sampler/mcmc/baoab.yaml diff --git a/src/jamun/hydra_config/batch_sampler/walkjump.yaml b/src/jamun/hydra_config/sampler/walkjump.yaml similarity index 100% rename from src/jamun/hydra_config/batch_sampler/walkjump.yaml rename to src/jamun/hydra_config/sampler/walkjump.yaml diff --git a/src/jamun/metrics/_posebusters.py b/src/jamun/metrics/_posebusters.py index 4925c81..5929741 100644 --- a/src/jamun/metrics/_posebusters.py +++ b/src/jamun/metrics/_posebusters.py @@ -41,7 +41,7 @@ def on_sample_start(self): metrics = {} if df is None: - py_logger = logging.getLogger("posebusters") + py_logger = logging.getLogger(__name__) py_logger.info(f"{self.dataset.label()}/PoseBusters found no molecules in the trajectory.") return metrics @@ -66,7 +66,7 @@ def compute(self) -> dict[str, float]: 
subsampling_factor = max(len(pred_trajectory) // self.num_molecules_per_trajectory, 1) df = run_posebusters_on_trajectory(pred_trajectory[::subsampling_factor]) if df is None: - py_logger = logging.getLogger("posebusters") + py_logger = logging.getLogger(__name__) py_logger.info("PoseBusters found no molecules in the trajectory.") else: mean_fail_rates = 1 - df.mean() diff --git a/src/jamun/metrics/_ramachandran.py b/src/jamun/metrics/_ramachandran.py index 44542c1..a24ebeb 100644 --- a/src/jamun/metrics/_ramachandran.py +++ b/src/jamun/metrics/_ramachandran.py @@ -253,7 +253,7 @@ def __init__(self, show_animation: bool = False, *args, **kwargs): def on_sample_start(self): # Hide the matplotlib logging. plt.set_loglevel("warning") - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) # Plot the Ramachandran for the true trajectory. true_trajectory = self.dataset.trajectory @@ -285,7 +285,7 @@ def on_sample_start(self): def compute(self) -> dict[str, float]: # Hide the matplotlib logging. plt.set_loglevel("warning") - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) # Convert the samples to trajectories. 
pred_trajectories = self.sample_trajectories(new=True) diff --git a/src/jamun/metrics/_save_trajectory.py b/src/jamun/metrics/_save_trajectory.py index 5e600ce..377e68f 100644 --- a/src/jamun/metrics/_save_trajectory.py +++ b/src/jamun/metrics/_save_trajectory.py @@ -92,7 +92,7 @@ def compute(self) -> dict[str, float]: # utils.save_pdb(pred_trajectory_joined, self.filename_pred("joined", "pdb")) pred_trajectory_joined.save_dcd(self.filename_pred("joined", "dcd")) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"{self.dataset.label()}: Saved predicted samples to {os.path.abspath(self.output_dir)}") return {} diff --git a/src/jamun/metrics/_utils.py b/src/jamun/metrics/_utils.py index 434c0e8..39b2373 100644 --- a/src/jamun/metrics/_utils.py +++ b/src/jamun/metrics/_utils.py @@ -103,7 +103,7 @@ def sample_trajectories(self, *, new: bool) -> list[md.Trajectory]: def joined_sample_trajectory(self) -> md.Trajectory: """Convert the samples to a single MD trajectory.""" - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) trajectories = utils.coordinates_to_trajectories(self.sample_tensors(new=False), self.dataset.topology) py_logger.info(f"{self.dataset.label()}: Joining {len(trajectories)} trajectories into 1.") diff --git a/src/jamun/model/denoiser.py b/src/jamun/model/denoiser.py index 25f7df1..314c522 100644 --- a/src/jamun/model/denoiser.py +++ b/src/jamun/model/denoiser.py @@ -46,7 +46,7 @@ def __init__( self.g = torch.compile(self.g, **torch_compile_kwargs) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(self.g) self.optim_factory = optim @@ -311,8 +311,11 @@ def training_step(self, data: torch_geometric.data.Batch, data_idx: int): with torch.cuda.nvtx.range("sample_sigma"): sigma = self.sigma_distribution.sample().to(self.device) - topology = data.clone() - del topology.pos, topology.batch, topology.num_graphs + with 
torch.cuda.nvtx.range("clone_data"): + topology = data.clone() + + with torch.cuda.nvtx.range("clear_topology"): + del topology.pos, topology.batch, topology.num_graphs x, batch, num_graphs = data.pos, data.batch, data.num_graphs if self.rotational_augmentation: diff --git a/src/jamun/model/energy.py b/src/jamun/model/energy.py index ffbb84d..1e24b4f 100644 --- a/src/jamun/model/energy.py +++ b/src/jamun/model/energy.py @@ -99,7 +99,7 @@ def __init__( self.use_torch_compile = use_torch_compile self.torch_compile_kwargs = torch_compile_kwargs or {} - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(self.g) self.optim_factory = optim diff --git a/src/jamun/utils/checkpoint.py b/src/jamun/utils/checkpoint.py index 4b9d601..ef984fe 100644 --- a/src/jamun/utils/checkpoint.py +++ b/src/jamun/utils/checkpoint.py @@ -14,7 +14,7 @@ def get_wandb_run_cwd(wandb_run_path: str) -> str: def get_wandb_run_config(wandb_run_path: str) -> dict[str, Any]: """Get the wandb run config.""" run = wandb.Api().run(wandb_run_path) - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.info(f"Loading checkpoint corresponding to wandb run {run.name} at {run.url}") return run.config["cfg"] @@ -67,7 +67,7 @@ def find_checkpoint_in_directory(checkpoint_dir: str, checkpoint_type: str) -> s def find_checkpoint(wandb_train_run_path: str = None, checkpoint_dir: str = None, checkpoint_type: str = None) -> str: """Find the checkpoint based on the wandb run path or the checkpoint directory.""" - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) if (wandb_train_run_path and checkpoint_dir) or (not wandb_train_run_path and not checkpoint_dir): raise ValueError( "Exactly one of wandb_train_run_path or checkpoint_dir must be provided." 
diff --git a/src/jamun/utils/dist_log.py b/src/jamun/utils/dist_log.py index 55e9f51..992b8a3 100644 --- a/src/jamun/utils/dist_log.py +++ b/src/jamun/utils/dist_log.py @@ -16,7 +16,7 @@ def dist_log(msg: str, logger: logging.Logger = None) -> None: """Helper for distributed logging.""" if logger is None: - logger = logging.getLogger("jamun") + logger = logging.getLogger(__name__) if torch.distributed.is_initialized(): world_size = torch.distributed.get_world_size() diff --git a/src/jamun/utils/rdkit.py b/src/jamun/utils/rdkit.py index d46a7dc..61dc9e0 100644 --- a/src/jamun/utils/rdkit.py +++ b/src/jamun/utils/rdkit.py @@ -21,7 +21,7 @@ def to_rdkit_mols(traj: md.Trajectory) -> list[Chem.Mol]: traj_mol = Chem.MolFromPDBFile(temp_pdb, removeHs=False, sanitize=False) if traj_mol is None: - py_logger = logging.getLogger("jamun") + py_logger = logging.getLogger(__name__) py_logger.warning("Could not convert the trajectory to RDKit mols.") return [] diff --git a/src/jamun/utils/sampling_wrapper.py b/src/jamun/utils/sampling_wrapper.py index 70ecbce..8ee42f9 100644 --- a/src/jamun/utils/sampling_wrapper.py +++ b/src/jamun/utils/sampling_wrapper.py @@ -61,7 +61,7 @@ def unbatch_samples(self, samples: dict[str, torch.Tensor]) -> list[torch_geomet for key, value in samples.items(): if value.ndim not in [2, 3]: - # py_logger = logging.getLogger("jamun") + # py_logger = logging.getLogger(__name__) # py_logger.info(f"Skipping unbatching of key {key} with shape {value.shape} as it is not 2D or 3D.") continue diff --git a/uv.lock b/uv.lock index 61f7351..6fde9d7 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.13' and sys_platform == 'linux'", @@ -646,17 +646,18 @@ wheels = [ [[package]] name = "e3tools" -version = "0.1.2" +version = "0.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "e3nn" }, + { name = "einops" }, { name = 
"jaxtyping" }, { name = "setuptools" }, { name = "torch" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/be/c5/05873faa48ccf3b29eb36caaa57dc184a217e37e41baa10130f46b0194f3/e3tools-0.1.2.tar.gz", hash = "sha256:a53354e148aeb3c29ac2a96cecba4ea64d1a3c33c3f756b82871873de9085bad", size = 86812, upload-time = "2025-07-24T23:11:31.292Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/03/d3560619bc9c7d7bb1548a260087201a217400f125f6b8c68e5c5532be3e/e3tools-0.1.3.tar.gz", hash = "sha256:a49d919b6f754767ca3c09eaa6a6e1c12fbddace156572b878bbe40ad70ceaa8", size = 106214, upload-time = "2025-08-04T23:29:52.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/50/1b4eff9273a21090a86e9168cfd59a8b32508418007aef045e03724bfc3a/e3tools-0.1.2-py3-none-any.whl", hash = "sha256:6bb8d7e87bb786fcbb5eccc3747d339fca7a0ead89d35074fec4b9864400480d", size = 21534, upload-time = "2025-07-24T23:11:30.181Z" }, + { url = "https://files.pythonhosted.org/packages/78/c2/76b4ea3bb1426a13d769bf1ae5a13766e8398116f9f158ee5298970c6664/e3tools-0.1.3-py3-none-any.whl", hash = "sha256:39fd064c42f5fe2edd5b15955b5cc514bf64863a5178dd6c041e73423dd03239", size = 21918, upload-time = "2025-08-04T23:29:51.107Z" }, ] [[package]] @@ -1177,6 +1178,13 @@ dependencies = [ { name = "wandb" }, ] +[package.optional-dependencies] +analysis = [ + { name = "polars" }, + { name = "pyarrow" }, + { name = "seaborn" }, +] + [package.dev-dependencies] dev = [ { name = "ipykernel" }, @@ -1205,14 +1213,17 @@ requires-dist = [ { name = "orb-models", specifier = ">=0.5.4" }, { name = "pandas", specifier = ">=2.1.0" }, { name = "plotly", specifier = ">=5.24.1" }, + { name = "polars", marker = "extra == 'analysis'", specifier = ">=1.32.0" }, { name = "posebusters", specifier = ">=0.3.1" }, { name = "pot", specifier = ">=0.9.5" }, { name = "py3dmol", specifier = ">=2.4.2" }, + { name = "pyarrow", marker = "extra == 'analysis'", specifier = ">=21.0.0" }, { name = "python-dotenv", specifier = 
">=1.0.1" }, { name = "rdkit", specifier = ">=2024.3.6" }, { name = "requests", specifier = ">=2.32.3" }, { name = "s3fs", extras = ["boto3"], specifier = ">=2024.10.0" }, { name = "scipy", specifier = ">=1.13.1" }, + { name = "seaborn", marker = "extra == 'analysis'", specifier = ">=0.13.2" }, { name = "statsmodels", specifier = ">=0.14.0" }, { name = "tabulate", specifier = ">=0.9.0" }, { name = "torch", specifier = ">=2.5.1" }, @@ -1223,6 +1234,7 @@ requires-dist = [ { name = "universal-pathlib", specifier = ">=0.2.6" }, { name = "wandb", specifier = ">=0.19.1" }, ] +provides-extras = ["analysis"] [package.metadata.requires-dev] dev = [ @@ -2296,6 +2308,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, ] +[[package]] +name = "polars" +version = "1.32.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/23/6a5f151981f3ac409bed6dc48a3eaecd0592a03eb382693d4c7e749eda8b/polars-1.32.0.tar.gz", hash = "sha256:b01045981c0f23eeccfbfc870b782f93e73b74b29212fdfc8aae0be9024bc1fb", size = 4761045, upload-time = "2025-08-01T01:43:22.413Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/40/5b27067d10b5a77ab4094932118e16629ffb20ea9ae5f7d1178e04087891/polars-1.32.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:94f7c6a3b30bc99bc6b682ea42bb1ae983e33a302ca21aacbac50ae19e34fcf2", size = 37479518, upload-time = "2025-08-01T01:42:18.603Z" }, + { url = "https://files.pythonhosted.org/packages/08/b7/ca28ac10d340fb91bffb2751efd52aebc9799ae161b867214c6299c8f75b/polars-1.32.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8bf14c16164839e62c741a863942a94a9a463db21e797452fca996c8afaf8827", size = 34214196, upload-time = "2025-08-01T01:42:22.667Z" }, + { 
url = "https://files.pythonhosted.org/packages/61/97/fe3797e8e1d4f9eadab32ffe218a841b8874585b6c9bd0f1a26469fb2992/polars-1.32.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c15adb97d44766d30c759f5cebbdb64d361e8349ef10b5afc7413f71bf4b72", size = 37985353, upload-time = "2025-08-01T01:42:26.033Z" }, + { url = "https://files.pythonhosted.org/packages/a0/7e/2baa2858556e970cc6a35c0d8ad34b2f9d982f1766c0a1fec20ca529a947/polars-1.32.0-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:13af55890734f89b76016a395fb2e7460e7d9feecf50ed2f55cf0f05a1c0c991", size = 35183912, upload-time = "2025-08-01T01:42:30.446Z" }, + { url = "https://files.pythonhosted.org/packages/ef/41/0e6821dccc5871186a9b95af3990404aa283318263918d33ac974b35cb37/polars-1.32.0-cp39-abi3-win_amd64.whl", hash = "sha256:0397fc2501a5d5f1bb3fe8d27e0c26c7a5349b4110157c0fb7833cd3f5921c9e", size = 37747905, upload-time = "2025-08-01T01:42:33.975Z" }, + { url = "https://files.pythonhosted.org/packages/c2/93/d06df0817da93f922a67e27e9e0f407856991374daa62687e2a45a18935c/polars-1.32.0-cp39-abi3-win_arm64.whl", hash = "sha256:dd84e24422509e1ec9be46f67f758d0bd9944d1ae4eacecee4f53adaa8ecd822", size = 33978543, upload-time = "2025-08-01T01:42:36.779Z" }, +] + [[package]] name = "posebusters" version = "0.3.6" @@ -2529,6 +2555,49 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/20/923885064f4e4d4392eb2be798532d91b315f9e60ef44f49f4800ba3c57a/py3Dmol-2.4.2-py2.py3-none-any.whl", hash = "sha256:bec23d9a015d692279a5f7d4db92803e4e82ba3bdcc1434a5b6a2be98a347856", size = 7046, upload-time = "2024-11-08T22:19:21.631Z" }, ] +[[package]] +name = "pyarrow" +version = "21.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz", hash = "sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc", size = 1133487, upload-time = 
"2025-07-18T00:57:31.761Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d9/110de31880016e2afc52d8580b397dbe47615defbf09ca8cf55f56c62165/pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e563271e2c5ff4d4a4cbeb2c83d5cf0d4938b891518e676025f7268c6fe5fe26", size = 31196837, upload-time = "2025-07-18T00:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/df/5f/c1c1997613abf24fceb087e79432d24c19bc6f7259cab57c2c8e5e545fab/pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fee33b0ca46f4c85443d6c450357101e47d53e6c3f008d658c27a2d020d44c79", size = 32659470, upload-time = "2025-07-18T00:54:38.329Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ed/b1589a777816ee33ba123ba1e4f8f02243a844fed0deec97bde9fb21a5cf/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7be45519b830f7c24b21d630a31d48bcebfd5d4d7f9d3bdb49da9cdf6d764edb", size = 41055619, upload-time = "2025-07-18T00:54:42.172Z" }, + { url = "https://files.pythonhosted.org/packages/44/28/b6672962639e85dc0ac36f71ab3a8f5f38e01b51343d7aa372a6b56fa3f3/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26bfd95f6bff443ceae63c65dc7e048670b7e98bc892210acba7e4995d3d4b51", size = 42733488, upload-time = "2025-07-18T00:54:47.132Z" }, + { url = "https://files.pythonhosted.org/packages/f8/cc/de02c3614874b9089c94eac093f90ca5dfa6d5afe45de3ba847fd950fdf1/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bd04ec08f7f8bd113c55868bd3fc442a9db67c27af098c5f814a3091e71cc61a", size = 43329159, upload-time = "2025-07-18T00:54:51.686Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3e/99473332ac40278f196e105ce30b79ab8affab12f6194802f2593d6b0be2/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9b0b14b49ac10654332a805aedfc0147fb3469cbf8ea951b3d040dab12372594", size = 45050567, upload-time = "2025-07-18T00:54:56.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/f5/c372ef60593d713e8bfbb7e0c743501605f0ad00719146dc075faf11172b/pyarrow-21.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9d9f8bcb4c3be7738add259738abdeddc363de1b80e3310e04067aa1ca596634", size = 26217959, upload-time = "2025-07-18T00:55:00.482Z" }, + { url = "https://files.pythonhosted.org/packages/94/dc/80564a3071a57c20b7c32575e4a0120e8a330ef487c319b122942d665960/pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c077f48aab61738c237802836fc3844f85409a46015635198761b0d6a688f87b", size = 31243234, upload-time = "2025-07-18T00:55:03.812Z" }, + { url = "https://files.pythonhosted.org/packages/ea/cc/3b51cb2db26fe535d14f74cab4c79b191ed9a8cd4cbba45e2379b5ca2746/pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:689f448066781856237eca8d1975b98cace19b8dd2ab6145bf49475478bcaa10", size = 32714370, upload-time = "2025-07-18T00:55:07.495Z" }, + { url = "https://files.pythonhosted.org/packages/24/11/a4431f36d5ad7d83b87146f515c063e4d07ef0b7240876ddb885e6b44f2e/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:479ee41399fcddc46159a551705b89c05f11e8b8cb8e968f7fec64f62d91985e", size = 41135424, upload-time = "2025-07-18T00:55:11.461Z" }, + { url = "https://files.pythonhosted.org/packages/74/dc/035d54638fc5d2971cbf1e987ccd45f1091c83bcf747281cf6cc25e72c88/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:40ebfcb54a4f11bcde86bc586cbd0272bac0d516cfa539c799c2453768477569", size = 42823810, upload-time = "2025-07-18T00:55:16.301Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3b/89fced102448a9e3e0d4dded1f37fa3ce4700f02cdb8665457fcc8015f5b/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8d58d8497814274d3d20214fbb24abcad2f7e351474357d552a8d53bce70c70e", size = 43391538, upload-time = "2025-07-18T00:55:23.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/bb/ea7f1bd08978d39debd3b23611c293f64a642557e8141c80635d501e6d53/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:585e7224f21124dd57836b1530ac8f2df2afc43c861d7bf3d58a4870c42ae36c", size = 45120056, upload-time = "2025-07-18T00:55:28.231Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0b/77ea0600009842b30ceebc3337639a7380cd946061b620ac1a2f3cb541e2/pyarrow-21.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:555ca6935b2cbca2c0e932bedd853e9bc523098c39636de9ad4693b5b1df86d6", size = 26220568, upload-time = "2025-07-18T00:55:32.122Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd", size = 31160305, upload-time = "2025-07-18T00:55:35.373Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876", size = 32684264, upload-time = "2025-07-18T00:55:39.303Z" }, + { url = "https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d", size = 41108099, upload-time = "2025-07-18T00:55:42.889Z" }, + { url = "https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e", size = 42829529, upload-time = "2025-07-18T00:55:47.069Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82", size = 43367883, upload-time = "2025-07-18T00:55:53.069Z" }, + { url = "https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623", size = 45133802, upload-time = "2025-07-18T00:55:57.714Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18", size = 26203175, upload-time = "2025-07-18T00:56:01.364Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a", size = 31154306, upload-time = "2025-07-18T00:56:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe", size = 32680622, upload-time = "2025-07-18T00:56:07.505Z" }, + { url = "https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd", size = 41104094, upload-time = "2025-07-18T00:56:10.994Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61", size = 42825576, upload-time = "2025-07-18T00:56:15.569Z" }, + { url = "https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d", size = 43368342, upload-time = "2025-07-18T00:56:19.531Z" }, + { url = "https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99", size = 45131218, upload-time = "2025-07-18T00:56:23.347Z" }, + { url = "https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636", size = 26087551, upload-time = "2025-07-18T00:56:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da", size = 31290064, upload-time = "2025-07-18T00:56:30.214Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7", size = 32727837, upload-time = "2025-07-18T00:56:33.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6", size = 41014158, upload-time = "2025-07-18T00:56:37.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8", size = 42667885, upload-time = "2025-07-18T00:56:41.483Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503", size = 43276625, upload-time = "2025-07-18T00:56:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79", size = 44951890, upload-time = "2025-07-18T00:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10", size = 26371006, upload-time = "2025-07-18T00:56:56.379Z" }, +] + [[package]] name = "pyasn1" version = "0.6.1" @@ -3151,6 +3220,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0a/c8/b3f566db71461cabd4b2d5b39bcc24a7e1c119535c8361f81426be39bb47/scipy-1.15.2-cp313-cp313t-win_amd64.whl", hash = "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db", size = 40477705, upload-time = "2025-02-17T00:34:43.619Z" }, ] +[[package]] +name = "seaborn" +version = 
"0.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pandas" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696, upload-time = "2024-01-25T13:21:52.551Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914, upload-time = "2024-01-25T13:21:49.598Z" }, +] + [[package]] name = "sentry-sdk" version = "2.24.0"