From bcc0b439f616b13a8629cb64d8bf0f88fc9083a8 Mon Sep 17 00:00:00 2001
From: Harim Kang
Date: Tue, 5 Nov 2024 19:29:23 +0900
Subject: [PATCH] =?UTF-8?q?=F0=9F=90=9EReplace=20package=5Favailable=20wit?=
 =?UTF-8?q?h=20module=5Favailable=20(#2407)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/anomalib/cli/pipelines.py                            | 4 ++--
 src/anomalib/cli/utils/openvino.py                       | 4 ++--
 src/anomalib/deploy/inferencers/openvino_inferencer.py   | 4 ++--
 src/anomalib/loggers/wandb.py                            | 4 ++--
 src/anomalib/models/components/base/export_mixin.py      | 6 +++---
 src/anomalib/models/image/vlm_ad/backends/chat_gpt.py    | 4 ++--
 src/anomalib/models/image/vlm_ad/backends/huggingface.py | 4 ++--
 src/anomalib/models/image/vlm_ad/backends/ollama.py      | 4 ++--
 src/anomalib/utils/exceptions/imports.py                 | 2 +-
 9 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/src/anomalib/cli/pipelines.py b/src/anomalib/cli/pipelines.py
index 8cfb04fd2e..ba6030491b 100644
--- a/src/anomalib/cli/pipelines.py
+++ b/src/anomalib/cli/pipelines.py
@@ -6,13 +6,13 @@
 import logging
 
 from jsonargparse import Namespace
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 
 from anomalib.cli.utils.help_formatter import get_short_docstring
 
 logger = logging.getLogger(__name__)
 
-if package_available("anomalib.pipelines"):
+if module_available("anomalib.pipelines"):
     from anomalib.pipelines import Benchmark
     from anomalib.pipelines.components.base import Pipeline
 
diff --git a/src/anomalib/cli/utils/openvino.py b/src/anomalib/cli/utils/openvino.py
index ee54bf09b2..50a894c304 100644
--- a/src/anomalib/cli/utils/openvino.py
+++ b/src/anomalib/cli/utils/openvino.py
@@ -6,12 +6,12 @@
 import logging
 
 from jsonargparse import ArgumentParser
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 
 logger = logging.getLogger(__name__)
 
-if package_available("openvino"):
+if module_available("openvino"):
     from openvino.tools.ovc.cli_parser import get_common_cli_parser
 else:
     get_common_cli_parser = None
 
diff --git a/src/anomalib/deploy/inferencers/openvino_inferencer.py b/src/anomalib/deploy/inferencers/openvino_inferencer.py
index 8dea77b92e..b85df0536c 100644
--- a/src/anomalib/deploy/inferencers/openvino_inferencer.py
+++ b/src/anomalib/deploy/inferencers/openvino_inferencer.py
@@ -9,7 +9,7 @@
 
 import cv2
 import numpy as np
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 from omegaconf import DictConfig
 from PIL import Image
 
@@ -94,7 +94,7 @@ def __init__(
         task: str | None = None,
         config: dict | None = None,
     ) -> None:
-        if not package_available("openvino"):
+        if not module_available("openvino"):
             msg = "OpenVINO is not installed. Please install OpenVINO to use OpenVINOInferencer."
             raise ImportError(msg)
 
diff --git a/src/anomalib/loggers/wandb.py b/src/anomalib/loggers/wandb.py
index 55e65e6d54..ff41a0949e 100644
--- a/src/anomalib/loggers/wandb.py
+++ b/src/anomalib/loggers/wandb.py
@@ -9,12 +9,12 @@
 from lightning.fabric.utilities.types import _PATH
 from lightning.pytorch.loggers.wandb import WandbLogger
 from lightning.pytorch.utilities import rank_zero_only
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 from matplotlib.figure import Figure
 
 from .base import ImageLoggerBase
 
-if package_available("wandb"):
+if module_available("wandb"):
     import wandb
 
 if TYPE_CHECKING:
diff --git a/src/anomalib/models/components/base/export_mixin.py b/src/anomalib/models/components/base/export_mixin.py
index d11b50ff99..327cb87e02 100644
--- a/src/anomalib/models/components/base/export_mixin.py
+++ b/src/anomalib/models/components/base/export_mixin.py
@@ -12,7 +12,7 @@
 
 import numpy as np
 import torch
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 from torch import nn
 from torchmetrics import Metric
 from torchvision.transforms.v2 import Transform
@@ -245,7 +245,7 @@ def to_openvino(
             ...     task="segmentation",
             ... )
         """
-        if not package_available("openvino"):
+        if not module_available("openvino"):
             logger.exception("Could not find OpenVINO. Please check OpenVINO installation.")
             raise ModuleNotFoundError
 
@@ -294,7 +294,7 @@ def _compress_ov_model(
         Returns:
             model (CompiledModel): Model in the OpenVINO format compressed with NNCF quantization.
         """
-        if not package_available("nncf"):
+        if not module_available("nncf"):
             logger.exception("Could not find NCCF. Please check NNCF installation.")
             raise ModuleNotFoundError
 
diff --git a/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py b/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py
index 741288354f..53648e688a 100644
--- a/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py
+++ b/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py
@@ -10,13 +10,13 @@
 from typing import TYPE_CHECKING
 
 from dotenv import load_dotenv
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 
 from anomalib.models.image.vlm_ad.utils import Prompt
 
 from .base import Backend
 
-if package_available("openai"):
+if module_available("openai"):
     from openai import OpenAI
 else:
     OpenAI = None
diff --git a/src/anomalib/models/image/vlm_ad/backends/huggingface.py b/src/anomalib/models/image/vlm_ad/backends/huggingface.py
index e25e9dccb3..e8d3c1e84b 100644
--- a/src/anomalib/models/image/vlm_ad/backends/huggingface.py
+++ b/src/anomalib/models/image/vlm_ad/backends/huggingface.py
@@ -7,7 +7,7 @@
 from pathlib import Path
 from typing import TYPE_CHECKING
 
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 from PIL import Image
 
 from anomalib.models.image.vlm_ad.utils import Prompt
@@ -18,7 +18,7 @@
     from transformers.modeling_utils import PreTrainedModel
     from transformers.processing_utils import ProcessorMixin
 
-if package_available("transformers"):
+if module_available("transformers"):
     import transformers
 else:
     transformers = None
diff --git a/src/anomalib/models/image/vlm_ad/backends/ollama.py b/src/anomalib/models/image/vlm_ad/backends/ollama.py
index db5a215bb3..ff680bee3b 100644
--- a/src/anomalib/models/image/vlm_ad/backends/ollama.py
+++ b/src/anomalib/models/image/vlm_ad/backends/ollama.py
@@ -12,13 +12,13 @@
 import logging
 from pathlib import Path
 
-from lightning_utilities.core.imports import package_available
+from lightning_utilities.core.imports import module_available
 
 from anomalib.models.image.vlm_ad.utils import Prompt
 
 from .base import Backend
 
-if package_available("ollama"):
+if module_available("ollama"):
     from ollama import chat
     from ollama._client import _encode_image
 else:
diff --git a/src/anomalib/utils/exceptions/imports.py b/src/anomalib/utils/exceptions/imports.py
index dac22ba056..6ef8dbd89d 100644
--- a/src/anomalib/utils/exceptions/imports.py
+++ b/src/anomalib/utils/exceptions/imports.py
@@ -22,7 +22,7 @@ def try_import(import_path: str) -> bool:
     """
     warnings.warn(
         "The 'try_import' function is deprecated and will be removed in v2.0.0. "
-        "Use 'package_available' from lightning-utilities instead.",
+        "Use 'module_available' from lightning-utilities instead.",
         DeprecationWarning,
         stacklevel=2,
     )
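
Note: both helpers come from lightning_utilities.core.imports. package_available is documented for
top-level package names, while module_available also resolves dotted module paths such as the
"anomalib.pipelines" check updated above. A minimal usage sketch follows; it is illustration only,
not part of the patch, and assumes lightning-utilities (and optionally anomalib/OpenVINO) is
installed in your environment:

    # Illustration only -- not part of the patch. Results depend on which
    # optional packages exist in your environment.
    from lightning_utilities.core.imports import module_available, package_available

    # Both helpers return a bool instead of raising when the target is absent.
    print(package_available("openvino"))           # top-level package check
    print(module_available("openvino"))            # same result for a top-level name

    # module_available also handles dotted submodule paths, e.g. the check
    # used in src/anomalib/cli/pipelines.py after this patch:
    print(module_available("anomalib.pipelines"))  # True only if the submodule imports cleanly
    print(module_available("no.such.module"))      # False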