diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index bf1f0f7..2677e85 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,7 +34,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ] + python-version: [ '3.9', '3.10', '3.11', '3.12', '3.13', '3.14' ] steps: - name: Checkout repository uses: actions/checkout@v4 diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c6088e..4001205 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog ## [Unreleased] +### Added +- Official `Python 3.14` support, by @HardNorth +- Custom log level support in `RPLogHandler` class, by @HardNorth +### Removed +- `Python 3.7` support, by @HardNorth +- Deprecated `log_manager.py` module, by @HardNorth + +## [5.6.7] ### Changed - `log_batch_payload_size` was renamed to `log_batch_payload_limit` as it was originally supposed, by @HardNorth diff --git a/reportportal_client/__init__.py b/reportportal_client/__init__.py index 9a66feb..bf99d38 100644 --- a/reportportal_client/__init__.py +++ b/reportportal_client/__init__.py @@ -15,7 +15,7 @@ import sys import warnings -from typing import Optional, Tuple, TypedDict, Union +from typing import Optional, TypedDict, Union # noinspection PyUnreachableCode if sys.version_info >= (3, 11): @@ -23,7 +23,7 @@ else: from typing_extensions import Unpack -import aenum +import aenum # type: ignore # noinspection PyProtectedMember from reportportal_client._internal.local import current, set_current @@ -43,9 +43,6 @@ class ClientType(aenum.Enum): class _ClientOptions(TypedDict, total=False): - client_type: ClientType - endpoint: str - project: str api_key: Optional[str] # OAuth 2.0 parameters oauth_uri: Optional[str] @@ -60,7 +57,7 @@ class _ClientOptions(TypedDict, total=False): verify_ssl: Union[bool, str] retries: int max_pool_size: int - http_timeout: Union[float, Tuple[float, float]] + http_timeout: Union[float, tuple[float, float]] mode: str launch_uuid_print: bool print_output: OutputType @@ -122,15 +119,16 @@ def create_client( :return: ReportPortal Client instance. """ my_kwargs = kwargs.copy() - if "log_batch_payload_size" in my_kwargs: + if "log_batch_payload_size" in my_kwargs: # type: ignore warnings.warn( message="Your agent is using `log_batch_payload_size` property which was introduced by mistake. " "The real property name is `log_batch_payload_limit`. 
Please consider Agent version update.", category=DeprecationWarning, stacklevel=2, ) + payload_size = my_kwargs.pop("log_batch_payload_size") # type: ignore if "log_batch_payload_limit" not in my_kwargs: - my_kwargs["log_batch_payload_limit"] = my_kwargs.pop("log_batch_payload_size") + my_kwargs["log_batch_payload_limit"] = payload_size if client_type is ClientType.SYNC: return RPClient(endpoint, project, **my_kwargs) diff --git a/reportportal_client/_internal/aio/http.py b/reportportal_client/_internal/aio/http.py index e969239..9022e6b 100644 --- a/reportportal_client/_internal/aio/http.py +++ b/reportportal_client/_internal/aio/http.py @@ -24,9 +24,9 @@ import asyncio import sys from types import TracebackType -from typing import Any, Callable, Coroutine, Optional, Type, Union +from typing import Any, Callable, Coroutine, Optional, Union -from aenum import Enum +from aenum import Enum # type: ignore from aiohttp import ClientResponse, ClientResponseError from aiohttp import ClientSession as AioHttpClientSession from aiohttp import ServerConnectionError @@ -77,31 +77,33 @@ def __init__( self.__retry_number = max_retry_number self.__retry_delay = base_retry_delay - async def __nothing(self): + async def __nothing(self) -> None: pass - def __sleep(self, retry_num: int, retry_factor: int) -> Coroutine: - if retry_num > 0: # don't wait at the first retry attempt + def __sleep(self, retry_num: int, retry_factor: Optional[int]) -> Coroutine: + if retry_num > 0 and retry_factor is not None: # don't wait at the first retry attempt delay = (((retry_factor * self.__retry_delay) * 1000) ** retry_num) / 1000 return asyncio.sleep(delay) else: return self.__nothing() - async def __request(self, method: Callable, url, **kwargs: Any) -> ClientResponse: + async def __request( + self, method: Callable[..., Coroutine[Any, Any, ClientResponse]], url: str, **kwargs: Any + ) -> ClientResponse: """Make a request and retry if necessary. The method retries requests depending on error class and retry number. For no-retry errors, such as 400 Bad Request it just returns result, for cases where it's reasonable to retry it does it in exponential manner. 
""" - result = None + result: Optional[ClientResponse] = None exceptions = [] for i in range(self.__retry_number + 1): # add one for the first attempt, which is not a retry - retry_factor = None + retry_factor: Optional[int] = None if result is not None: # Release previous result to return connection to pool - await result.release() + result.release() try: result = await method(url, **kwargs) except Exception as exc: @@ -136,6 +138,8 @@ async def __request(self, method: Callable, url, **kwargs: Any) -> ClientRespons raise exceptions[-1] else: raise exceptions[0] + if result is None: + raise IOError("Request failed without exceptions") return result def get(self, url: str, *, allow_redirects: bool = True, **kwargs: Any) -> Coroutine[Any, Any, ClientResponse]: @@ -150,7 +154,7 @@ def put(self, url: str, *, data: Any = None, **kwargs: Any) -> Coroutine[Any, An """Perform HTTP PUT request.""" return self.__request(self._client.put, url, data=data, **kwargs) - def close(self) -> Coroutine: + def close(self) -> Coroutine[None, None, None]: """Gracefully close internal aiohttp.ClientSession class instance.""" return self._client.close() @@ -160,7 +164,7 @@ async def __aenter__(self) -> "RetryingClientSession": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -241,7 +245,7 @@ async def __aenter__(self) -> "ClientSession": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: diff --git a/reportportal_client/_internal/aio/tasks.py b/reportportal_client/_internal/aio/tasks.py index 00cbfbb..7b75d26 100644 --- a/reportportal_client/_internal/aio/tasks.py +++ b/reportportal_client/_internal/aio/tasks.py @@ -17,7 +17,7 @@ import sys import time from asyncio import Future -from typing import Any, Awaitable, Coroutine, Generator, Generic, List, Optional, TypeVar, Union +from typing import Any, Awaitable, Coroutine, Generator, Generic, Optional, TypeVar, Union from reportportal_client.aio.tasks import BlockingOperationError, Task @@ -142,7 +142,7 @@ def __call__( class TriggerTaskBatcher(Generic[_T]): """Batching class which compile its batches by object number or by passed time.""" - __task_list: List[_T] + __task_list: list[_T] __last_run_time: float __trigger_num: int __trigger_interval: float @@ -170,7 +170,7 @@ def __ready_to_run(self) -> bool: return True return False - def append(self, value: _T) -> Optional[List[_T]]: + def append(self, value: _T) -> Optional[list[_T]]: """Add an object to internal batch and return the batch if it's triggered. :param value: an object to add to the batch @@ -184,7 +184,7 @@ def append(self, value: _T) -> Optional[List[_T]]: self.__task_list = [] return tasks - def flush(self) -> Optional[List[_T]]: + def flush(self) -> Optional[list[_T]]: """Immediately return everything what's left in the internal batch. 
:return: a batch or None @@ -200,7 +200,7 @@ def flush(self) -> Optional[List[_T]]: class BackgroundTaskList(Generic[_T]): """Task list class which collects Tasks into internal batch and removes when they complete.""" - __task_list: List[_T] + __task_list: list[_T] def __init__(self): """Initialize an instance of the Batcher.""" @@ -222,7 +222,7 @@ def append(self, value: _T) -> None: self.__remove_finished() self.__task_list.append(value) - def flush(self) -> Optional[List[_T]]: + def flush(self) -> Optional[list[_T]]: """Immediately return everything what's left unfinished in the internal batch. :return: a batch or None diff --git a/reportportal_client/_internal/http.py b/reportportal_client/_internal/http.py index 06d1513..358ef87 100644 --- a/reportportal_client/_internal/http.py +++ b/reportportal_client/_internal/http.py @@ -15,7 +15,7 @@ """This module designed to help with synchronous HTTP request/response handling.""" from types import TracebackType -from typing import Any, Callable, Optional, Type, Union +from typing import Any, Callable, Optional, Union from requests import Response, Session from requests.adapters import BaseAdapter @@ -104,7 +104,7 @@ def __enter__(self) -> "ClientSession": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: diff --git a/reportportal_client/_internal/logs/batcher.py b/reportportal_client/_internal/logs/batcher.py index fc7378c..00fdf9f 100644 --- a/reportportal_client/_internal/logs/batcher.py +++ b/reportportal_client/_internal/logs/batcher.py @@ -15,7 +15,7 @@ import logging import threading -from typing import Any, Dict, Generic, List, Optional, TypeVar +from typing import Any, Generic, Optional, TypeVar from reportportal_client.core.rp_requests import AsyncRPRequestLog, RPRequestLog from reportportal_client.logs import MAX_LOG_BATCH_PAYLOAD_SIZE, MAX_LOG_BATCH_SIZE @@ -35,7 +35,7 @@ class LogBatcher(Generic[T_co]): entry_num: int payload_limit: int _lock: threading.Lock - _batch: List[T_co] + _batch: list[T_co] _payload_size: int def __init__(self, entry_num=MAX_LOG_BATCH_SIZE, payload_limit=MAX_LOG_BATCH_PAYLOAD_SIZE) -> None: @@ -50,7 +50,7 @@ def __init__(self, entry_num=MAX_LOG_BATCH_SIZE, payload_limit=MAX_LOG_BATCH_PAY self._batch = [] self._payload_size = 0 - def _append(self, size: int, log_req: RPRequestLog) -> Optional[List[RPRequestLog]]: + def _append(self, size: int, log_req: RPRequestLog) -> Optional[list[RPRequestLog]]: with self._lock: if self._payload_size + size >= self.payload_limit: if len(self._batch) > 0: @@ -68,7 +68,7 @@ def _append(self, size: int, log_req: RPRequestLog) -> Optional[List[RPRequestLo self._payload_size = 0 return batch - def append(self, log_req: RPRequestLog) -> Optional[List[RPRequestLog]]: + def append(self, log_req: RPRequestLog) -> Optional[list[RPRequestLog]]: """Add a log request object to internal batch and return the batch if it's full. :param log_req: log request object @@ -76,7 +76,7 @@ def append(self, log_req: RPRequestLog) -> Optional[List[RPRequestLog]]: """ return self._append(log_req.multipart_size, log_req) - async def append_async(self, log_req: AsyncRPRequestLog) -> Optional[List[AsyncRPRequestLog]]: + async def append_async(self, log_req: AsyncRPRequestLog) -> Optional[list[AsyncRPRequestLog]]: """Add a log request object to internal batch and return the batch if it's full. 
:param log_req: log request object @@ -84,7 +84,7 @@ async def append_async(self, log_req: AsyncRPRequestLog) -> Optional[List[AsyncR """ return self._append(await log_req.multipart_size, log_req) - def flush(self) -> Optional[List[T_co]]: + def flush(self) -> Optional[list[T_co]]: """Immediately return everything what's left in the internal batch. :return: a batch or None @@ -99,7 +99,7 @@ def flush(self) -> Optional[List[T_co]]: self._payload_size = 0 return batch - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Control object pickling and return object fields as Dictionary. :return: object state dictionary @@ -110,7 +110,7 @@ def __getstate__(self) -> Dict[str, Any]: del state["_lock"] return state - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Control object pickling, receives object state as Dictionary. :param dict state: object state dictionary diff --git a/reportportal_client/_internal/services/client_id.py b/reportportal_client/_internal/services/client_id.py index ed7c0b7..6331344 100644 --- a/reportportal_client/_internal/services/client_id.py +++ b/reportportal_client/_internal/services/client_id.py @@ -17,6 +17,7 @@ import io import logging import os +from typing import Iterable, Optional from uuid import uuid4 from .constants import CLIENT_ID_PROPERTY, RP_FOLDER_PATH, RP_PROPERTIES_FILE_PATH @@ -32,35 +33,35 @@ def __preprocess_file(self, fp): content = "[" + self.DEFAULT_SECTION + "]\n" + fp.read() return io.StringIO(content) - def read(self, filenames, encoding=None): + def read_file(self, filenames: Iterable[str], source: Optional[str] = None) -> None: if isinstance(filenames, str): filenames = [filenames] for filename in filenames: with open(filename, "r") as fp: preprocessed_fp = self.__preprocess_file(fp) - self.read_file(preprocessed_fp, filename) + super().read_file(preprocessed_fp, filename) - def write(self, fp, space_around_delimiters=True): + def write(self, fp, space_around_delimiters: bool = True) -> None: for key, value in self.items(self.DEFAULT_SECTION): delimiter = " = " if space_around_delimiters else "=" fp.write("{}{}{}\n".format(key, delimiter, value)) -def __read_config(): +def __read_config() -> configparser.ConfigParser: config = __NoSectionConfigParser() if os.path.exists(RP_PROPERTIES_FILE_PATH): - config.read(RP_PROPERTIES_FILE_PATH) + config.read_file(RP_PROPERTIES_FILE_PATH) return config -def _read_client_id(): +def _read_client_id() -> Optional[str]: config = __read_config() if config.has_option(__NoSectionConfigParser.DEFAULT_SECTION, CLIENT_ID_PROPERTY): return config.get(__NoSectionConfigParser.DEFAULT_SECTION, CLIENT_ID_PROPERTY) return None -def _store_client_id(client_id): +def _store_client_id(client_id: str) -> None: config = __read_config() if not os.path.exists(RP_FOLDER_PATH): os.makedirs(RP_FOLDER_PATH) @@ -69,7 +70,7 @@ def _store_client_id(client_id): config.write(fp) -def get_client_id(): +def get_client_id() -> str: """Return unique client ID of the instance, generate new if not exists.""" client_id = None try: diff --git a/reportportal_client/_internal/services/client_id.pyi b/reportportal_client/_internal/services/client_id.pyi deleted file mode 100644 index c290676..0000000 --- a/reportportal_client/_internal/services/client_id.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -from typing import Optional, Text - -def _read_client_id() -> Optional[Text]: ... -def _store_client_id(client_id: Text) -> None: ... -def get_client_id() -> Text: ... diff --git a/reportportal_client/_internal/services/constants.py b/reportportal_client/_internal/services/constants.py index 04b06c4..adbdf80 100644 --- a/reportportal_client/_internal/services/constants.py +++ b/reportportal_client/_internal/services/constants.py @@ -17,7 +17,7 @@ import os -def _decode_string(text): +def _decode_string(text: str) -> str: """Decode value of the given string. :param text: Encoded string @@ -28,8 +28,8 @@ def _decode_string(text): return message_bytes.decode("ascii") -CLIENT_INFO = _decode_string("Ry1XUDU3UlNHOFhMOm5Ib3dqRjJQUVotNDFJbzBPcDRoZlE=") -ENDPOINT = "https://www.google-analytics.com/mp/collect" -CLIENT_ID_PROPERTY = "client.id" -RP_FOLDER_PATH = os.path.join(os.path.expanduser("~"), ".rp") -RP_PROPERTIES_FILE_PATH = os.path.join(RP_FOLDER_PATH, "rp.properties") +CLIENT_INFO: str = _decode_string("Ry1XUDU3UlNHOFhMOm5Ib3dqRjJQUVotNDFJbzBPcDRoZlE=") +ENDPOINT: str = "https://www.google-analytics.com/mp/collect" +CLIENT_ID_PROPERTY: str = "client.id" +RP_FOLDER_PATH: str = os.path.join(os.path.expanduser("~"), ".rp") +RP_PROPERTIES_FILE_PATH: str = os.path.join(RP_FOLDER_PATH, "rp.properties") diff --git a/reportportal_client/_internal/services/constants.pyi b/reportportal_client/_internal/services/constants.pyi deleted file mode 100644 index 8c72d17..0000000 --- a/reportportal_client/_internal/services/constants.pyi +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2023 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -from typing import Text - -def _decode_string(text: Text) -> Text: ... - -CLIENT_INFO: Text -ENDPOINT: Text -CLIENT_ID_PROPERTY: Text -RP_FOLDER_PATH: Text -RP_PROPERTIES_FILE_PATH: Text diff --git a/reportportal_client/_internal/services/statistics.py b/reportportal_client/_internal/services/statistics.py index b65f432..6b8cd67 100644 --- a/reportportal_client/_internal/services/statistics.py +++ b/reportportal_client/_internal/services/statistics.py @@ -16,7 +16,7 @@ import logging import ssl from platform import python_version -from typing import Optional, Tuple +from typing import Optional import aiohttp import certifi @@ -31,7 +31,7 @@ ID, KEY = CLIENT_INFO.split(":") -def _get_client_info() -> Tuple[str, str]: +def _get_client_info() -> tuple[str, str]: """Get name of the client and its version. 
:return: ('reportportal-client', '5.0.4') diff --git a/reportportal_client/_internal/static/abstract.py b/reportportal_client/_internal/static/abstract.py index 48f5bc2..b7a78d9 100644 --- a/reportportal_client/_internal/static/abstract.py +++ b/reportportal_client/_internal/static/abstract.py @@ -36,7 +36,7 @@ class Implementation(Interface): i = Implementation() -> success """ - _abc_registry = set() + _abc_registry: set[str] = set() def __call__(cls, *args, **kwargs): """Disable instantiation for the interface classes.""" diff --git a/reportportal_client/aio/client.py b/reportportal_client/aio/client.py index 06f5583..420da55 100644 --- a/reportportal_client/aio/client.py +++ b/reportportal_client/aio/client.py @@ -20,7 +20,7 @@ import time as datetime import warnings from os import getenv -from typing import Any, Coroutine, Dict, List, Optional, Tuple, TypeVar, Union +from typing import Any, Coroutine, Optional, TypeVar, Union import aiohttp import certifi @@ -115,7 +115,7 @@ class Client: verify_ssl: Union[bool, str] retries: Optional[int] max_pool_size: int - http_timeout: Optional[Union[float, Tuple[float, float]]] + http_timeout: Optional[Union[float, tuple[float, float]]] keepalive_timeout: Optional[float] mode: str launch_uuid_print: bool @@ -135,7 +135,7 @@ def __init__( verify_ssl: Union[bool, str] = True, retries: int = NOT_SET, max_pool_size: int = 50, - http_timeout: Optional[Union[float, Tuple[float, float]]] = (10, 10), + http_timeout: Optional[Union[float, tuple[float, float]]] = (10, 10), keepalive_timeout: Optional[float] = None, mode: str = "DEFAULT", launch_uuid_print: bool = False, @@ -259,12 +259,12 @@ async def session(self) -> ClientSession: else: ssl_config = ssl.create_default_context(ssl.Purpose.SERVER_AUTH, cafile=certifi.where()) - connection_params: Dict[str, Any] = {"ssl": ssl_config, "limit": self.max_pool_size} + connection_params: dict[str, Any] = {"ssl": ssl_config, "limit": self.max_pool_size} if self.keepalive_timeout: connection_params["keepalive_timeout"] = self.keepalive_timeout connector = aiohttp.TCPConnector(**connection_params) - session_params: Dict[str, Any] = {"connector": connector} + session_params: dict[str, Any] = {"connector": connector} if self.http_timeout: if type(self.http_timeout) is tuple: @@ -366,7 +366,7 @@ async def start_test_item( *, parent_item_id: Optional[Union[str, Task[str]]] = None, description: Optional[str] = None, - attributes: Optional[Union[List[dict], dict]] = None, + attributes: Optional[Union[list[dict], dict]] = None, parameters: Optional[dict] = None, code_ref: Optional[str] = None, test_case_id: Optional[str] = None, @@ -629,7 +629,7 @@ async def get_project_settings(self) -> Optional[dict]: response = await AsyncHttpRequest((await self.session()).get, url=url, name="get_project_settings").make() return await response.json if response else None - async def log_batch(self, log_batch: Optional[List[AsyncRPRequestLog]]) -> Optional[Tuple[str, ...]]: + async def log_batch(self, log_batch: Optional[list[AsyncRPRequestLog]]) -> Optional[tuple[str, ...]]: """Send batch logging message to the ReportPortal. :param log_batch: A list of log message objects. @@ -672,7 +672,7 @@ def clone(self) -> "Client": ) return cloned - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Control object pickling and return object fields as Dictionary. 
:return: object state dictionary @@ -683,7 +683,7 @@ def __getstate__(self) -> Dict[str, Any]: del state["_session"] return state - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Control object pickling, receives object state as Dictionary. :param dict state: object state dictionary @@ -847,7 +847,7 @@ async def start_test_item( start_time: str, item_type: str, description: Optional[str] = None, - attributes: Optional[List[dict]] = None, + attributes: Optional[list[dict]] = None, parameters: Optional[dict] = None, parent_item_id: Optional[str] = None, has_stats: bool = True, @@ -1047,7 +1047,7 @@ async def log( level: Optional[Union[int, str]] = None, attachment: Optional[dict] = None, item_id: Optional[str] = None, - ) -> Optional[Tuple[str, ...]]: + ) -> Optional[tuple[str, ...]]: """Send Log message to the ReportPortal and attach it to a Test Item or Launch. This method stores Log messages in internal batch and sent it when batch is full, so not every method @@ -1293,7 +1293,7 @@ def start_test_item( start_time: str, item_type: str, description: Optional[str] = None, - attributes: Optional[List[dict]] = None, + attributes: Optional[list[dict]] = None, parameters: Optional[dict] = None, parent_item_id: Optional[Task[str]] = None, has_stats: bool = True, @@ -1485,10 +1485,10 @@ def get_project_settings(self) -> Task[Optional[str]]: result_task = self.create_task(result_coro) return result_task - async def _log_batch(self, log_rq: Optional[List[AsyncRPRequestLog]]) -> Optional[Tuple[str, ...]]: + async def _log_batch(self, log_rq: Optional[list[AsyncRPRequestLog]]) -> Optional[tuple[str, ...]]: return await self.__client.log_batch(log_rq) - async def _log(self, log_rq: AsyncRPRequestLog) -> Optional[Tuple[str, ...]]: + async def _log(self, log_rq: AsyncRPRequestLog) -> Optional[tuple[str, ...]]: return await self._log_batch(await self._log_batcher.append_async(log_rq)) def log( @@ -1498,7 +1498,7 @@ def log( level: Optional[Union[int, str]] = None, attachment: Optional[dict] = None, item_id: Optional[Task[str]] = None, - ) -> Task[Optional[Tuple[str, ...]]]: + ) -> Task[Optional[tuple[str, ...]]]: """Send Log message to the ReportPortal and attach it to a Test Item or Launch. This method stores Log messages in internal batch and sent it when batch is full, so not every method @@ -1697,7 +1697,7 @@ def clone(self) -> "ThreadedRPClient": cloned._add_current_item(current_item) return cloned - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Control object pickling and return object fields as Dictionary. :return: object state dictionary @@ -1710,7 +1710,7 @@ def __getstate__(self) -> Dict[str, Any]: del state["_thread"] return state - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Control object pickling, receives object state as Dictionary. :param dict state: object state dictionary @@ -1889,7 +1889,7 @@ def clone(self) -> "BatchedRPClient": cloned._add_current_item(current_item) return cloned - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Control object pickling and return object fields as Dictionary. 
:return: object state dictionary @@ -1901,7 +1901,7 @@ def __getstate__(self) -> Dict[str, Any]: del state["_loop"] return state - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Control object pickling, receives object state as Dictionary. :param dict state: object state dictionary diff --git a/reportportal_client/client.py b/reportportal_client/client.py index 63a8d51..8078010 100644 --- a/reportportal_client/client.py +++ b/reportportal_client/client.py @@ -20,7 +20,7 @@ import warnings from abc import abstractmethod from os import getenv -from typing import Any, Dict, List, Optional, TextIO, Tuple, Union +from typing import Any, Optional, TextIO, Union import aenum from requests.adapters import DEFAULT_RETRIES, HTTPAdapter, Retry @@ -167,7 +167,7 @@ def start_test_item( start_time: str, item_type: str, description: Optional[str] = None, - attributes: Optional[Union[List[dict], dict]] = None, + attributes: Optional[Union[list[dict], dict]] = None, parameters: Optional[dict] = None, parent_item_id: Optional[str] = None, has_stats: Optional[bool] = True, @@ -316,7 +316,7 @@ def log( level: Optional[Union[int, str]] = None, attachment: Optional[dict] = None, item_id: Optional[str] = None, - ) -> Optional[Tuple[str, ...]]: + ) -> Optional[tuple[str, ...]]: """Send Log message to the ReportPortal and attach it to a Test Item or Launch. This method stores Log messages in internal batch and sent it when batch is full, so not every method @@ -387,7 +387,7 @@ class RPClient(RP): base_url_v2: str __endpoint: str is_skipped_an_issue: bool - __launch_uuid: str + __launch_uuid: Optional[str] use_own_launch: bool log_batch_size: int log_batch_payload_limit: int @@ -403,7 +403,7 @@ class RPClient(RP): verify_ssl: Union[bool, str] retries: int max_pool_size: int - http_timeout: Union[float, Tuple[float, float]] + http_timeout: Union[float, tuple[float, float]] session: ClientSession __step_reporter: StepReporter mode: str @@ -468,8 +468,8 @@ def __init__( verify_ssl: Union[bool, str] = True, retries: int = None, max_pool_size: int = 50, - launch_uuid: str = None, - http_timeout: Union[float, Tuple[float, float]] = (10, 10), + launch_uuid: Optional[str] = None, + http_timeout: Union[float, tuple[float, float]] = (10, 10), log_batch_payload_limit: int = MAX_LOG_BATCH_PAYLOAD_SIZE, mode: str = "DEFAULT", launch_uuid_print: bool = False, @@ -522,7 +522,7 @@ def __init__( self.is_skipped_an_issue = is_skipped_an_issue self.__launch_uuid = launch_uuid if not self.__launch_uuid: - launch_id = kwargs.get("launch_id") + launch_id = kwargs.get("launch_id") # type: ignore if launch_id: warnings.warn( message="`launch_id` property is deprecated since 5.5.0 and will be subject for removing" @@ -658,7 +658,7 @@ def start_test_item( start_time: str, item_type: str, description: Optional[str] = None, - attributes: Optional[Union[List[dict], dict]] = None, + attributes: Optional[Union[list[dict], dict]] = None, parameters: Optional[dict] = None, parent_item_id: Optional[str] = None, has_stats: bool = True, @@ -856,7 +856,7 @@ def update_test_item( logger.debug("update_test_item - Item: %s", item_id) return response.message - def _log(self, batch: Optional[List[RPRequestLog]]) -> Optional[Tuple[str, ...]]: + def _log(self, batch: Optional[list[RPRequestLog]]) -> Optional[tuple[str, ...]]: if not batch: return None @@ -878,7 +878,7 @@ def log( level: Optional[Union[int, str]] = None, attachment: Optional[dict] = None, item_id: Optional[str] = None, - ) -> 
Optional[Tuple[str, ...]]: + ) -> Optional[tuple[str, ...]]: """Send Log message to the ReportPortal and attach it to a Test Item or Launch. This method stores Log messages in internal batch and sent it when batch is full, so not every method @@ -1036,7 +1036,7 @@ def close(self) -> None: self._log(self._log_batcher.flush()) self.session.close() - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Control object pickling and return object fields as Dictionary. :return: object state dictionary @@ -1047,7 +1047,7 @@ def __getstate__(self) -> Dict[str, Any]: del state["session"] return state - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Control object pickling, receives object state as Dictionary. :param dict state: object state dictionary diff --git a/reportportal_client/core/rp_file.py b/reportportal_client/core/rp_file.py index a905e3c..0610c06 100644 --- a/reportportal_client/core/rp_file.py +++ b/reportportal_client/core/rp_file.py @@ -14,12 +14,24 @@ """This module contains classes representing RP file object.""" import uuid +from typing import Any, Optional class RPFile(object): """Class representation for a file that will be attached to the log.""" - def __init__(self, name=None, content=None, content_type=None, data=None, mime=None): + content: Optional[Any] + content_type: Optional[str] + name: str + + def __init__( + self, + name: Optional[str] = None, + content: Optional[Any] = None, + content_type: Optional[str] = None, + data: Optional[Any] = None, + mime: Optional[str] = None, + ) -> None: """Initialize instance attributes. :param name: File name @@ -33,6 +45,6 @@ def __init__(self, name=None, content=None, content_type=None, data=None, mime=N self.name = name if name and name.strip() else str(uuid.uuid4()) @property - def payload(self): + def payload(self) -> dict[str, Any]: """Get HTTP payload for the request.""" return {"content": self.content, "contentType": self.content_type, "name": self.name} diff --git a/reportportal_client/core/rp_file.pyi b/reportportal_client/core/rp_file.pyi deleted file mode 100644 index 9eb7996..0000000 --- a/reportportal_client/core/rp_file.pyi +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2022 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -from typing import Any, Dict, Optional, Text - -class RPFile: - content: Any = ... - content_type: Text = ... - name: Text = ... - - def __init__(self, name: Optional[Text], content: Any, content_type: Optional[Text]) -> None: ... - @property - def payload(self) -> Dict: ... 
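For reference, a minimal sketch of how the now fully annotated `RPFile` from `reportportal_client/core/rp_file.py` is used after this change; the file name and bytes below are illustrative only, not part of the diff:

```python
from reportportal_client.core.rp_file import RPFile

# Build an attachment descriptor; when name is empty it falls back to a generated UUID.
screenshot = RPFile(name="failure.png", content=b"<binary data>", content_type="image/png")

# The payload property forms the dict used for the multipart "file" part of a log request.
print(screenshot.payload)
# -> {'content': b'<binary data>', 'contentType': 'image/png', 'name': 'failure.png'}
```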
diff --git a/reportportal_client/core/rp_issues.py b/reportportal_client/core/rp_issues.py index 3c12a59..48fab89 100644 --- a/reportportal_client/core/rp_issues.py +++ b/reportportal_client/core/rp_issues.py @@ -39,11 +39,25 @@ } """ +from typing import Any, Optional + class Issue: """This class represents an issue that can be attached to test result.""" - def __init__(self, issue_type, comment=None, auto_analyzed=False, ignore_analyzer=True): + _external_issues: list + auto_analyzed: bool + comment: Optional[str] + ignore_analyzer: bool + issue_type: str + + def __init__( + self, + issue_type: str, + comment: Optional[str] = None, + auto_analyzed: bool = False, + ignore_analyzer: bool = True, + ) -> None: """Initialize instance attributes. :param issue_type: Issue type locator. Allowable values: "pb***", @@ -61,12 +75,12 @@ def __init__(self, issue_type, comment=None, auto_analyzed=False, ignore_analyze self.ignore_analyzer = ignore_analyzer self.issue_type = issue_type - def external_issue_add(self, issue): + def external_issue_add(self, issue: "ExternalIssue") -> None: """Add external system issue to the issue.""" self._external_issues.append(issue.payload) @property - def payload(self): + def payload(self) -> dict[str, Optional[Any]]: """Form the correct dictionary for the issue.""" return { "autoAnalyzed": self.auto_analyzed, @@ -80,7 +94,20 @@ def payload(self): class ExternalIssue: """This class represents external(BTS) system issue.""" - def __init__(self, bts_url=None, bts_project=None, submit_date=None, ticket_id=None, url=None): + bts_url: Optional[str] + bts_project: Optional[str] + submit_date: Optional[str] + ticket_id: Optional[str] + url: Optional[str] + + def __init__( + self, + bts_url: Optional[str] = None, + bts_project: Optional[str] = None, + submit_date: Optional[str] = None, + ticket_id: Optional[str] = None, + url: Optional[str] = None, + ) -> None: """Initialize instance attributes. :param bts_url: Bug tracker system URL @@ -96,7 +123,7 @@ def __init__(self, bts_url=None, bts_project=None, submit_date=None, ticket_id=N self.url = url @property - def payload(self): + def payload(self) -> dict[str, Optional[str]]: """Form the correct dictionary for the BTS issue.""" return { "btsUrl": self.bts_url, diff --git a/reportportal_client/core/rp_issues.pyi b/reportportal_client/core/rp_issues.pyi deleted file mode 100644 index d1dfb9f..0000000 --- a/reportportal_client/core/rp_issues.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2022 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -from typing import Dict, List, Optional, Text - -class Issue: - _external_issues: List = ... - auto_analyzed: bool = ... - comment: Text = ... - ignore_analyzer: bool = ... - issue_type: Text = ... - - def __init__( - self, - issue_type: Text, - comment: Optional[Text] = ..., - auto_analyzed: Optional[bool] = ..., - ignore_analyzer: Optional[bool] = ..., - ) -> None: ... - def external_issue_add(self, issue: ExternalIssue) -> None: ... - @property - def payload(self) -> Dict: ... 
- -class ExternalIssue: - bts_url: Text = ... - bts_project: Text = ... - submit_date: Text = ... - ticket_id: Text = ... - url: Text = ... - - def __init__( - self, - bts_url: Optional[Text] = ..., - bts_project: Optional[Text] = ..., - submit_date: Optional[Text] = ..., - ticket_id: Optional[Text] = ..., - url: Optional[Text] = ..., - ) -> None: ... - @property - def payload(self) -> Dict: ... diff --git a/reportportal_client/core/rp_requests.py b/reportportal_client/core/rp_requests.py index 6bfc96f..ec68681 100644 --- a/reportportal_client/core/rp_requests.py +++ b/reportportal_client/core/rp_requests.py @@ -24,7 +24,7 @@ import traceback from dataclasses import dataclass from datetime import datetime -from typing import Any, Callable, List, Optional, Tuple, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union import aiohttp @@ -59,7 +59,7 @@ class HttpRequest: data: Optional[Any] json: Optional[Any] verify_ssl: Optional[Union[bool, str]] - http_timeout: Union[float, Tuple[float, float]] + http_timeout: Union[float, tuple[float, float]] name: Optional[str] _priority: Priority @@ -71,7 +71,7 @@ def __init__( json: Optional[Any] = None, files: Optional[Any] = None, verify_ssl: Optional[Union[bool, str]] = None, - http_timeout: Union[float, Tuple[float, float]] = (10, 10), + http_timeout: Union[float, tuple[float, float]] = (10, 10), name: Optional[str] = None, ) -> None: """Initialize an instance of the request with attributes. @@ -589,10 +589,10 @@ class RPLogBatch(RPRequestBase): """ default_content: str - log_reqs: List[Union[RPRequestLog, AsyncRPRequestLog]] + log_reqs: list[Union[RPRequestLog, AsyncRPRequestLog]] priority: Priority - def __init__(self, log_reqs: List[Union[RPRequestLog, AsyncRPRequestLog]]) -> None: + def __init__(self, log_reqs: list[Union[RPRequestLog, AsyncRPRequestLog]]) -> None: """Initialize instance attributes. :param log_reqs: @@ -602,11 +602,11 @@ def __init__(self, log_reqs: List[Union[RPRequestLog, AsyncRPRequestLog]]) -> No self.log_reqs = log_reqs self.priority = LOW_PRIORITY - def __get_file(self, rp_file) -> Tuple[str, tuple]: + def __get_file(self, rp_file) -> tuple[str, tuple]: """Form a tuple for the single file.""" return "file", (rp_file.name, rp_file.content, rp_file.content_type or self.default_content) - def _get_files(self) -> List[Tuple[str, tuple]]: + def _get_files(self) -> list[tuple[str, tuple]]: """Get list of files for the JSON body.""" files = [] for req in self.log_reqs: @@ -614,7 +614,7 @@ def _get_files(self) -> List[Tuple[str, tuple]]: files.append(self.__get_file(req.file)) return files - def __get_request_part(self) -> List[Tuple[str, tuple]]: + def __get_request_part(self) -> list[tuple[str, tuple]]: body = [ ( "json_request_part", @@ -624,7 +624,7 @@ def __get_request_part(self) -> List[Tuple[str, tuple]]: return body @property - def payload(self) -> List[Tuple[str, tuple]]: + def payload(self) -> list[tuple[str, tuple]]: r"""Get HTTP payload for the request. 
Example: @@ -657,7 +657,7 @@ def __int__(self, *args, **kwargs) -> None: """Initialize an instance of the request with attributes.""" super.__init__(*args, **kwargs) - async def __get_request_part(self) -> List[dict]: + async def __get_request_part(self) -> list[dict]: coroutines = [log.payload for log in self.log_reqs] return list(await asyncio.gather(*coroutines)) diff --git a/reportportal_client/core/rp_responses.py b/reportportal_client/core/rp_responses.py index 0c3b0cf..861d7f7 100644 --- a/reportportal_client/core/rp_responses.py +++ b/reportportal_client/core/rp_responses.py @@ -19,7 +19,7 @@ """ import logging -from typing import Any, Generator, Mapping, Optional, Tuple, Union +from typing import Any, Generator, Mapping, Optional, Union from aiohttp import ClientError, ClientResponse from requests import Response @@ -117,7 +117,7 @@ def message(self) -> Optional[str]: return _get_field("message", self.json) @property - def messages(self) -> Optional[Tuple[str, ...]]: + def messages(self) -> Optional[tuple[str, ...]]: """Get list of messages received in the response. :return: a variable size tuple of strings or NOT_FOUND, or None if the response is not JSON @@ -181,7 +181,7 @@ async def message(self) -> Optional[str]: return _get_field("message", await self.json) @property - async def messages(self) -> Optional[Tuple[str, ...]]: + async def messages(self) -> Optional[tuple[str, ...]]: """Get list of messages received in the response. :return: a variable size tuple of strings or NOT_FOUND, or None if the response is not JSON diff --git a/reportportal_client/core/worker.py b/reportportal_client/core/worker.py index 5e74597..a9c0a48 100644 --- a/reportportal_client/core/worker.py +++ b/reportportal_client/core/worker.py @@ -17,17 +17,20 @@ import queue import threading import warnings +from queue import PriorityQueue from threading import Thread, current_thread +from typing import Optional, Union from aenum import Enum, auto, unique # noinspection PyProtectedMember from reportportal_client._internal.static.defines import Priority +from reportportal_client.core.rp_requests import HttpRequest logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) -THREAD_TIMEOUT = 10 # Thread termination / wait timeout in seconds +THREAD_TIMEOUT: int = 10 # Thread termination / wait timeout in seconds @unique @@ -40,18 +43,18 @@ class ControlCommand(Enum): STOP = auto() STOP_IMMEDIATE = auto() - def is_stop_cmd(self): + def is_stop_cmd(self) -> bool: """Verify if the command is the stop one.""" return self in (ControlCommand.STOP, ControlCommand.STOP_IMMEDIATE) @property - def priority(self): + def priority(self) -> Priority: """Get the priority of the command.""" if self is ControlCommand.STOP_IMMEDIATE: return Priority.PRIORITY_IMMEDIATE return Priority.PRIORITY_LOW - def __lt__(self, other): + def __lt__(self, other: Union["ControlCommand", "HttpRequest"]) -> bool: """Priority protocol for the PriorityQueue.""" return self.priority < other.priority @@ -59,7 +62,12 @@ def __lt__(self, other): class APIWorker(object): """Worker that makes HTTP requests to the ReportPortal.""" - def __init__(self, task_queue): + _queue: PriorityQueue + _thread: Optional[Thread] + _stop_lock: threading.Condition + name: str + + def __init__(self, task_queue: PriorityQueue) -> None: """Initialize instance attributes.""" warnings.warn( message="`APIWorker` class is deprecated since 5.5.0 and will be subject for removing in the" @@ -72,7 +80,7 @@ def __init__(self, task_queue): self._stop_lock = 
threading.Condition() self.name = self.__class__.__name__ - def _command_get(self): + def _command_get(self) -> Optional[ControlCommand]: """Get command from the queue.""" try: cmd = self._queue.get(timeout=0.1) @@ -80,7 +88,7 @@ def _command_get(self): except queue.Empty: return None - def _command_process(self, cmd): + def _command_process(self, cmd: Optional[ControlCommand]) -> None: """Process control command sent to the worker. :param cmd: a command to be processed @@ -95,15 +103,17 @@ def _command_process(self, cmd): else: self._stop() - def _request_process(self, request): + def _request_process(self, request: Optional[HttpRequest]) -> None: """Send request to RP and update response attribute of the request.""" + if not request: + return logger.debug("[%s] Processing {%s} request", self.name, request) try: request.make() except Exception as err: logger.exception("[%s] Unknown exception has occurred. " "Skipping it.", err) - def _monitor(self): + def _monitor(self) -> None: """Monitor worker queues and process them. This method runs on a separate, internal thread. The thread will @@ -126,7 +136,7 @@ def _monitor(self): logger.debug("[%s] Received {%s} request", self.name, cmd) self._request_process(cmd) - def _stop(self): + def _stop(self) -> None: """Routine that stops the worker thread(s). This method process everything in worker's queue first, ignoring @@ -139,7 +149,7 @@ def _stop(self): request = self._command_get() self._stop_immediately() - def _stop_immediately(self): + def _stop_immediately(self) -> None: """Routine that stops the worker thread(s) immediately. This asks the thread to terminate, and then waits for it to do so. @@ -153,18 +163,18 @@ def _stop_immediately(self): self._stop_lock.notify_all() self._stop_lock.release() - def is_alive(self): + def is_alive(self) -> bool: """Check whether the current worker is alive or not. :return: True is self._thread is not None, False otherwise """ return bool(self._thread) and self._thread.is_alive() - def send(self, entity): + def send(self, entity: Union[ControlCommand, HttpRequest]) -> None: """Send control command or a request to the worker queue.""" self._queue.put(entity) - def start(self): + def start(self) -> None: """Start the worker. This starts up a background thread to monitor the queue for @@ -177,7 +187,7 @@ def start(self): self._thread.daemon = True self._thread.start() - def __perform_stop(self, stop_command): + def __perform_stop(self, stop_command: ControlCommand) -> None: if not self.is_alive(): # Already stopped or already dead or not even started return @@ -191,14 +201,14 @@ def __perform_stop(self, stop_command): # pytest self._stop_lock.wait(THREAD_TIMEOUT) - def stop(self): + def stop(self) -> None: """Stop the worker. Send the appropriate control command to the worker. """ self.__perform_stop(ControlCommand.STOP) - def stop_immediate(self): + def stop_immediate(self) -> None: """Stop the worker immediately. Send the appropriate control command to the worker. diff --git a/reportportal_client/core/worker.pyi b/reportportal_client/core/worker.pyi deleted file mode 100644 index 85b341e..0000000 --- a/reportportal_client/core/worker.pyi +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (c) 2022 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -import threading -from logging import Logger -from queue import PriorityQueue -from threading import Thread -from typing import Any, Optional, Text, Union - -from aenum import Enum - -# noinspection PyProtectedMember -from reportportal_client._internal.static.defines import Priority -from reportportal_client.core.rp_requests import HttpRequest -from reportportal_client.core.rp_requests import RPRequestBase as RPRequest - -logger: Logger -THREAD_TIMEOUT: int - -class ControlCommand(Enum): - CLEAR_QUEUE: Any = ... - NOP: Any = ... - REPORT_STATUS: Any = ... - STOP: Any = ... - STOP_IMMEDIATE: Any = ... - - def is_stop_cmd(self) -> bool: ... - def __lt__(self, other: Union[ControlCommand, RPRequest]) -> bool: ... - @property - def priority(self) -> Priority: ... - -class APIWorker: - _queue: PriorityQueue = ... - _thread: Optional[Thread] = ... - _stop_lock: threading.Condition = ... - name: Text = ... - - def __init__(self, task_queue: PriorityQueue) -> None: ... - def _command_get(self) -> Optional[ControlCommand]: ... - def _command_process(self, cmd: Optional[ControlCommand]) -> None: ... - def _request_process(self, request: Optional[HttpRequest]) -> None: ... - def _monitor(self) -> None: ... - def _stop(self) -> None: ... - def _stop_immediately(self) -> None: ... - def is_alive(self) -> bool: ... - def send(self, cmd: Union[ControlCommand, HttpRequest]) -> Any: ... - def start(self) -> None: ... - def __perform_stop(self, stop_command: ControlCommand) -> None: ... - def stop(self) -> None: ... - def stop_immediate(self) -> None: ... diff --git a/reportportal_client/helpers/common_helpers.py b/reportportal_client/helpers/common_helpers.py index a36abf8..330c822 100644 --- a/reportportal_client/helpers/common_helpers.py +++ b/reportportal_client/helpers/common_helpers.py @@ -24,7 +24,7 @@ import uuid from platform import machine, processor, system from types import MappingProxyType -from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Tuple, TypeVar, Union +from typing import Any, Callable, Generic, Iterable, Optional, TypeVar, Union from reportportal_client.core.rp_file import RPFile @@ -69,7 +69,7 @@ class LifoQueue(Generic[_T]): """Primitive thread-safe Last-in-first-out queue implementation.""" _lock: threading.Lock - __items: List[_T] + __items: list[_T] def __init__(self): """Initialize the queue instance.""" @@ -111,7 +111,7 @@ def qsize(self): with self._lock: return len(self.__items) - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Control object pickling and return object fields as Dictionary. :return: object state dictionary @@ -122,7 +122,7 @@ def __getstate__(self) -> Dict[str, Any]: del state["_lock"] return state - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Control object pickling, receives object state as Dictionary. 
:param dict state: object state dictionary @@ -136,7 +136,7 @@ def generate_uuid() -> str: return str(uuid.uuid4()) -def dict_to_payload(dictionary: Optional[dict]) -> Optional[List[dict]]: +def dict_to_payload(dictionary: Optional[dict]) -> Optional[list[dict]]: """Convert incoming dictionary to the list of dictionaries. This function transforms the given dictionary of tags/attributes into @@ -153,14 +153,14 @@ def dict_to_payload(dictionary: Optional[dict]) -> Optional[List[dict]]: hidden = my_dictionary.pop("system", None) result = [] for key, value in sorted(my_dictionary.items()): - attribute: Dict[str, Any] = {"key": str(key), "value": str(value)} + attribute: dict[str, Any] = {"key": str(key), "value": str(value)} if hidden is not None: attribute["system"] = hidden result.append(attribute) return result -def gen_attributes(rp_attributes: Iterable[str]) -> List[Dict[str, str]]: +def gen_attributes(rp_attributes: Iterable[str]) -> list[dict[str, str]]: """Generate list of attributes for the API request. Example of input list: @@ -186,7 +186,7 @@ def gen_attributes(rp_attributes: Iterable[str]) -> List[Dict[str, str]]: return attributes -def get_launch_sys_attrs() -> Dict[str, str]: +def get_launch_sys_attrs() -> dict[str, str]: """Generate attributes for the launch containing system information. :return: dict {'os': 'Windows', @@ -201,7 +201,7 @@ def get_launch_sys_attrs() -> Dict[str, str]: } -def get_package_parameters(package_name: str, parameters: List[str] = None) -> List[Optional[str]]: +def get_package_parameters(package_name: str, parameters: list[str] = None) -> list[Optional[str]]: """Get parameters of the given package. :param package_name: Name of the package. @@ -244,7 +244,7 @@ def truncate_attribute_string(text: str) -> str: return text -def verify_value_length(attributes: Optional[Union[List[dict], dict]]) -> Optional[List[dict]]: +def verify_value_length(attributes: Optional[Union[list[dict], dict]]) -> Optional[list[dict]]: """Verify length of the attribute value. The length of the attribute value should have size from '1' to '128'. @@ -312,7 +312,7 @@ def root_uri_join(*uri_parts: str) -> str: return "/" + uri_join(*uri_parts) -def get_function_params(func: Callable, args: tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]: +def get_function_params(func: Callable, args: tuple, kwargs: dict[str, Any]) -> dict[str, Any]: """Extract argument names from the function and combine them with values. :param func: the function to get arg names @@ -383,7 +383,7 @@ def calculate_file_part_size(file: Optional[RPFile]) -> int: return size -def agent_name_version(attributes: Optional[Union[list, dict]] = None) -> Tuple[Optional[str], Optional[str]]: +def agent_name_version(attributes: Optional[Union[list, dict]] = None) -> tuple[Optional[str], Optional[str]]: """Extract Agent name and version from given Launch attributes. :param attributes: Launch attributes as they provided to Start Launch call @@ -429,7 +429,7 @@ def is_binary(iterable: Union[bytes, bytearray, str]) -> bool: return False -def guess_content_type_from_bytes(data: Union[bytes, bytearray, List[int]]) -> str: +def guess_content_type_from_bytes(data: Union[bytes, bytearray, list[int]]) -> str: """Guess content type from bytes. 
:param data: bytes or bytearray diff --git a/reportportal_client/helpers/markdown_helpers.py b/reportportal_client/helpers/markdown_helpers.py index 88cb4e6..9730003 100644 --- a/reportportal_client/helpers/markdown_helpers.py +++ b/reportportal_client/helpers/markdown_helpers.py @@ -1,11 +1,11 @@ """A set of utility methods for reporting to ReportPortal.""" from itertools import zip_longest -from typing import Any, Dict, List, Optional +from typing import Any, Optional MARKDOWN_MODE = "!!!MARKDOWN_MODE!!!" NEW_LINE = "\n" -ONE_SPACE = "\xA0" +ONE_SPACE = "\xa0" TABLE_INDENT = ONE_SPACE * 4 TABLE_COLUMN_SEPARATOR = "|" TABLE_ROW_SEPARATOR = "-" @@ -36,7 +36,7 @@ def as_code(language: Optional[str], script: Optional[str]) -> str: return as_markdown(f"```{lang}\n{script}\n```") -def calculate_col_sizes(table: List[List[str]]) -> List[int]: +def calculate_col_sizes(table: list[list[str]]) -> list[int]: """Calculate maximum width for each column in the table. :param table: Table data as list of rows @@ -52,7 +52,7 @@ def calculate_col_sizes(table: List[List[str]]) -> List[int]: return [max(len(str(cell)) for cell in col if cell is not None) for col in cols] -def calculate_table_size(col_sizes: List[int]) -> int: +def calculate_table_size(col_sizes: list[int]) -> int: """Calculate total table width including separators and padding. :param col_sizes: List of column widths @@ -66,7 +66,7 @@ def calculate_table_size(col_sizes: List[int]) -> int: return col_table_size -def transpose_table(table: List[List[Any]]) -> List[List[Any]]: +def transpose_table(table: list[list[Any]]) -> list[list[Any]]: """Transpose table rows into columns. :param table: Table data as list of rows @@ -82,7 +82,7 @@ def transpose_table(table: List[List[Any]]) -> List[List[Any]]: return [list(filter(None, col)) for col in transposed] -def adjust_col_sizes(col_sizes: List[int], max_table_size: int) -> List[int]: +def adjust_col_sizes(col_sizes: list[int], max_table_size: int) -> list[int]: """Adjust column sizes to fit maximum table width. :param col_sizes: List of column widths @@ -109,7 +109,7 @@ def adjust_col_sizes(col_sizes: List[int], max_table_size: int) -> List[int]: return [size for size, _ in sorted(cols_by_size, key=lambda x: x[1])] -def format_data_table(table: List[List[str]], max_table_size: int = MAX_TABLE_SIZE) -> str: +def format_data_table(table: list[list[str]], max_table_size: int = MAX_TABLE_SIZE) -> str: """Convert a table represented as List of Lists to a formatted table string. :param table: Table data as list of rows @@ -160,7 +160,7 @@ def format_data_table(table: List[List[str]], max_table_size: int = MAX_TABLE_SI return "\n".join(result) -def format_data_table_dict(table: Dict[str, str]) -> str: +def format_data_table_dict(table: dict[str, str]) -> str: """Convert a table represented as Map to a formatted table string. 
:param table: Table data as dictionary diff --git a/reportportal_client/logs/__init__.py b/reportportal_client/logs/__init__.py index 56b02c9..abc029c 100644 --- a/reportportal_client/logs/__init__.py +++ b/reportportal_client/logs/__init__.py @@ -16,20 +16,33 @@ import logging import sys import threading +from typing import TYPE_CHECKING, Any, Optional, Union from urllib.parse import urlparse # noinspection PyProtectedMember from reportportal_client._internal.local import current, set_current from reportportal_client.helpers import TYPICAL_MULTIPART_FOOTER_LENGTH, timestamp +if TYPE_CHECKING: + from reportportal_client.client import RP + +LOG_LEVEL_MAPPING: dict[int, str] = { + logging.NOTSET: "TRACE", + logging.DEBUG: "DEBUG", + logging.INFO: "INFO", + logging.WARNING: "WARN", + logging.ERROR: "ERROR", + logging.CRITICAL: "ERROR", +} + MAX_LOG_BATCH_SIZE: int = 20 MAX_LOG_BATCH_PAYLOAD_SIZE: int = int((64 * 1024 * 1024) * 0.98) - TYPICAL_MULTIPART_FOOTER_LENGTH -class RPLogger(logging.getLoggerClass()): +class RPLogger(logging.getLoggerClass()): # type: ignore """RPLogger class for low-level logging in tests.""" - def __init__(self, name, level=0): + def __init__(self, name: str, level: int = 0) -> None: """ Initialize RPLogger instance. @@ -38,7 +51,17 @@ def __init__(self, name, level=0): """ super(RPLogger, self).__init__(name, level=level) - def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False, attachment=None, **kwargs): + def _log( + self, + level: int, + msg: Any, + args: tuple, + exc_info: Optional[Union[bool, tuple]] = None, + extra: Optional[dict] = None, + stack_info: bool = False, + attachment: Optional[dict] = None, + **kwargs: Any, + ) -> None: """ Low-level logging routine which creates a LogRecord and then calls. @@ -82,24 +105,22 @@ class RPLogHandler(logging.Handler): """RPLogHandler class for logging tests.""" # Map loglevel codes from `logging` module to ReportPortal text names: - _loglevel_map = { - logging.NOTSET: "TRACE", - logging.DEBUG: "DEBUG", - logging.INFO: "INFO", - logging.WARNING: "WARN", - logging.ERROR: "ERROR", - logging.CRITICAL: "ERROR", - } - _sorted_levelnos = sorted(_loglevel_map.keys(), reverse=True) + _loglevel_map: dict[int, str] + _sorted_levelnos: list[int] + filter_client_logs: bool + ignored_record_names: tuple[str, ...] + endpoint: Optional[str] + rp_client: Optional["RP"] def __init__( self, - level=logging.NOTSET, - filter_client_logs=False, - endpoint=None, - ignored_record_names=tuple("reportportal_client"), - rp_client=None, - ): + level: int = logging.NOTSET, + filter_client_logs: bool = False, + endpoint: Optional[str] = None, + ignored_record_names: tuple = tuple("reportportal_client"), + rp_client: Optional["RP"] = None, + custom_levels: Optional[dict[int, str]] = None, + ) -> None: """ Initialize RPLogHandler instance. @@ -112,12 +133,16 @@ def __init__( (with startswith method) """ super(RPLogHandler, self).__init__(level) + self._loglevel_map = LOG_LEVEL_MAPPING.copy() + if custom_levels: + self._loglevel_map.update(custom_levels) + self._sorted_levelnos = sorted(self._loglevel_map.keys(), reverse=True) self.filter_client_logs = filter_client_logs self.ignored_record_names = ignored_record_names self.endpoint = endpoint self.rp_client = rp_client - def filter(self, record): + def filter(self, record: logging.LogRecord) -> bool: """Filter specific records to avoid sending those to RP. 
:param record: A log record to be filtered @@ -131,23 +156,20 @@ def filter(self, record): if record.name.startswith("urllib3.connectionpool"): # Filter the reportportal_client requests instance # urllib3 usage - hostname = urlparse(self.endpoint).hostname - if hostname: - if hasattr(hostname, "decode") and callable(hostname.decode): - if hostname.decode("utf-8") in self.format(record): - return False - else: - if str(hostname) in self.format(record): + if self.endpoint: + hostname = urlparse(self.endpoint).hostname + if hostname: + if hostname in self.format(record): return False return True - def _get_rp_log_level(self, levelno): + def _get_rp_log_level(self, levelno: int) -> str: return next( (self._loglevel_map[level] for level in self._sorted_levelnos if levelno >= level), self._loglevel_map[logging.NOTSET], ) - def emit(self, record): + def emit(self, record: logging.LogRecord) -> None: """ Emit function. diff --git a/reportportal_client/logs/log_manager.py b/reportportal_client/logs/log_manager.py deleted file mode 100644 index 5589e69..0000000 --- a/reportportal_client/logs/log_manager.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright (c) 2022 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -"""This module contains management functionality for processing logs.""" - -import logging -import queue -import warnings -from threading import Lock - -from reportportal_client import helpers -from reportportal_client.core.rp_requests import HttpRequest, RPFile, RPLogBatch, RPRequestLog -from reportportal_client.core.worker import APIWorker -from reportportal_client.logs import MAX_LOG_BATCH_PAYLOAD_SIZE, MAX_LOG_BATCH_SIZE - -logger = logging.getLogger(__name__) - - -class LogManager: - """Manager of the log items.""" - - def __init__( - self, - rp_url, - session, - api_version, - launch_id, - project_name, - max_entry_number=MAX_LOG_BATCH_SIZE, - verify_ssl=True, - max_payload_size=MAX_LOG_BATCH_PAYLOAD_SIZE, - ): - """Initialize instance attributes. 
- - :param rp_url: ReportPortal URL - :param session: HTTP Session object - :param api_version: RP API version - :param launch_id: Parent launch UUID - :param project_name: RP project name - :param max_entry_number: The amount of log objects that need to be - gathered before processing - :param verify_ssl: Indicates that it is necessary to verify SSL - certificates within HTTP request - :param max_payload_size: maximum size in bytes of logs that can be - processed in one batch - """ - warnings.warn( - message="`LogManager` class is deprecated since 5.5.0 and will be subject for removing in the" - " next major version.", - category=DeprecationWarning, - stacklevel=2, - ) - self._lock = Lock() - self._batch = [] - self._payload_size = helpers.TYPICAL_MULTIPART_FOOTER_LENGTH - self._worker = None - self.api_version = api_version - self.queue = queue.PriorityQueue() - self.launch_id = launch_id - self.max_entry_number = max_entry_number - self.max_payload_size = max_payload_size - self.project_name = project_name - self.rp_url = rp_url - self.session = session - self.verify_ssl = verify_ssl - - self._log_endpoint = "{rp_url}/api/{version}/{project_name}/log".format( - rp_url=rp_url.rstrip("/"), version=self.api_version, project_name=self.project_name - ) - - def _send_batch(self): - """Send existing batch logs to the worker.""" - batch = RPLogBatch(self._batch) - http_request = HttpRequest( - self.session.post, self._log_endpoint, files=batch.payload, verify_ssl=self.verify_ssl - ) - self._worker.send(http_request) - self._batch = [] - self._payload_size = helpers.TYPICAL_MULTIPART_FOOTER_LENGTH - - def _log_process(self, log_req): - """Process the given log request. - - :param log_req: RPRequestLog object - """ - rq_size = log_req.multipart_size - with self._lock: - if self._payload_size + rq_size >= self.max_payload_size: - if len(self._batch) > 0: - self._send_batch() - self._batch.append(log_req) - self._payload_size += rq_size - if len(self._batch) >= self.max_entry_number: - self._send_batch() - - def log(self, time, message=None, level=None, attachment=None, item_id=None): - """Log message. Can be added to test item in any state. - - :param time: Log time - :param message: Log message - :param level: Log level - :param attachment: Attachments(images,files,etc.) 
- :param item_id: parent item UUID - """ - if not item_id: - logger.warning("Attempt to log to non-existent item") - return - rp_file = RPFile(**attachment) if attachment else None - rp_log = RPRequestLog(self.launch_id, time, rp_file, item_id, level, message) - self._log_process(rp_log) - - def start(self): - """Create a new instance of the Worker class and start it.""" - if not self._worker: - # the worker might be already created in case of deserialization - self._worker = APIWorker(self.queue) - self._worker.start() - - def stop(self): - """Send last batches to the worker followed by the stop command.""" - if self._worker: - with self._lock: - if self._batch: - self._send_batch() - logger.debug("Waiting for worker {0} to complete" "processing batches.".format(self._worker)) - self._worker.stop() - - def stop_force(self): - """Send stop immediate command to the worker.""" - if self._worker: - self._worker.stop_immediate() diff --git a/reportportal_client/logs/log_manager.pyi b/reportportal_client/logs/log_manager.pyi deleted file mode 100644 index 58e167a..0000000 --- a/reportportal_client/logs/log_manager.pyi +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) 2022 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -import queue -from logging import Logger -from threading import Lock -from typing import Dict, List, Optional, Text - -from requests import Session - -from reportportal_client.core.rp_requests import RPRequestLog -from reportportal_client.core.worker import APIWorker as APIWorker - -logger: Logger - -class LogManager: - _lock: Lock = ... - _log_endpoint: Text = ... - _batch: List = ... - _payload_size: int = ... - _worker: Optional[APIWorker] = ... - api_version: Text = ... - queue: queue.PriorityQueue = ... - launch_id: Text = ... - max_entry_number: int = ... - project_name: Text = ... - rp_url: Text = ... - session: Session = ... - verify_ssl: bool = ... - max_payload_size: int = ... - - def __init__( - self, - rp_url: Text, - session: Session, - api_version: Text, - launch_id: Text, - project_name: Text, - max_entry_number: int = ..., - verify_ssl: bool = ..., - max_payload_size: int = ..., - ) -> None: ... - def _log_process(self, log_req: RPRequestLog) -> None: ... - def _send_batch(self) -> None: ... - def log( - self, - time: Text, - message: Optional[Text] = ..., - level: Optional[Text] = ..., - attachment: Optional[Dict] = ..., - item_id: Optional[Text] = ..., - ) -> None: ... - def start(self) -> None: ... - def stop(self) -> None: ... - def stop_force(self) -> None: ... 
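
With `log_manager.py` and its stub removed, log records reach ReportPortal through `RPLogHandler` and the active client instead. Below is a minimal sketch of that path, not a definitive recipe: it assumes a ReportPortal client has already been initialized elsewhere (the handler also accepts an explicit `rp_client=` argument), and the endpoint URL is a placeholder.

```python
import logging

from reportportal_client.logs import RPLogger, RPLogHandler

# RPLogger accepts the extra `attachment` keyword on logging calls.
logging.setLoggerClass(RPLogger)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

# Without an explicit `rp_client`, the handler falls back to the client
# registered for the current thread; `filter_client_logs` together with
# `endpoint` suppresses the client's own urllib3 traffic.
handler = RPLogHandler(
    level=logging.INFO,
    filter_client_logs=True,
    endpoint="https://rp.example.com",  # placeholder URL, an assumption
)
logger.addHandler(handler)

logger.info("Forwarded to ReportPortal through the active client")
```
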
diff --git a/reportportal_client/steps/__init__.py b/reportportal_client/steps/__init__.py index 542b5bf..2e4ddad 100644 --- a/reportportal_client/steps/__init__.py +++ b/reportportal_client/steps/__init__.py @@ -43,7 +43,7 @@ def test_my_nested_step(): """ from functools import wraps -from typing import Any, Callable, Dict, Optional, Type, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union import reportportal_client as rp @@ -88,7 +88,7 @@ def __init__(self, rp_client: "rp.RP"): self.client = rp_client def start_nested_step( - self, name: str, start_time: str, parameters: Optional[Dict[str, Any]] = None, **_: Dict[str, Any] + self, name: str, start_time: str, parameters: Optional[dict[str, Any]] = None, **_: dict[str, Any] ) -> Union[Optional[str], Task[Optional[str]]]: """Start Nested Step on ReportPortal. @@ -104,7 +104,7 @@ def start_nested_step( ) def finish_nested_step( - self, item_id: str, end_time: str, status: str = None, **_: Dict[str, Any] + self, item_id: str, end_time: str, status: str = None, **_: dict[str, Any] ) -> Union[Optional[str], Task[Optional[str]]]: """Finish a Nested Step on ReportPortal. @@ -119,12 +119,12 @@ class Step(Callable[[_Param], _Return]): """Step context handling class.""" name: str - params: Dict + params: dict status: str client: Optional["rp.RP"] __item_id: Union[Optional[str], Task[Optional[str]]] - def __init__(self, name: str, params: Dict, status: str, rp_client: Optional["rp.RP"]) -> None: + def __init__(self, name: str, params: dict, status: str, rp_client: Optional["rp.RP"]) -> None: """Initialize required attributes. :param name: Nested Step name @@ -153,7 +153,7 @@ def __enter__(self) -> None: param_str = "Parameters: " + "; ".join(param_list) rp_client.log(timestamp(), param_str, level="INFO", item_id=self.__item_id) - def __exit__(self, exc_type: Type[BaseException], exc_val, exc_tb) -> None: + def __exit__(self, exc_type: type[BaseException], exc_val, exc_tb) -> None: """Exit the runtime context related to this object.""" # Cannot call local.current() early since it will be initialized before client put something in there rp_client = self.client or current() @@ -188,7 +188,7 @@ def wrapper(*my_args, **my_kwargs): def step( name_source: Union[Callable[[_Param], _Return], str], - params: Optional[Dict] = None, + params: Optional[dict] = None, status: str = "PASSED", rp_client: Optional["rp.RP"] = None, ) -> Callable[[_Param], _Return]: diff --git a/requirements-dev.txt b/requirements-dev.txt index b72c70d..1b16115 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,3 +3,5 @@ pytest-cov pytest-asyncio black isort +types-requests +mypy diff --git a/requirements.txt b/requirements.txt index 1012378..1eec00f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ aenum typing-extensions>=4.13.2 -requests>=2.32.3 -aiohttp>=3.10.11 -certifi>=2024.8.30 +requests>=2.32.4 +aiohttp>=3.11.18 +certifi>=2025.11.12 diff --git a/setup.py b/setup.py index e7967e3..9a8fd8c 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import find_packages, setup -__version__ = "5.6.7" +__version__ = "5.7.0" TYPE_STUBS = ["*.pyi"] @@ -39,12 +39,12 @@ def read_file(fname): license="Apache-2.0", keywords=["testing", "reporting", "reportportal", "client"], classifiers=[ - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: 
Python :: 3.13", + "Programming Language :: Python :: 3.14", ], install_requires=read_file("requirements.txt").splitlines(), ) diff --git a/tests/logs/test_log_manager.py b/tests/logs/test_log_manager.py deleted file mode 100644 index fa3f6d8..0000000 --- a/tests/logs/test_log_manager.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright (c) 2022 EPAM Systems -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -import json -import os -from unittest import mock - -from reportportal_client import helpers -from reportportal_client.core.rp_requests import HttpRequest -from reportportal_client.logs import MAX_LOG_BATCH_PAYLOAD_SIZE -from reportportal_client.logs.log_manager import LogManager - -RP_URL = "http://docker.local:8080" -API_VERSION = "v2" -TEST_LAUNCH_ID = "test_launch_id" -TEST_ITEM_ID = "test_item_id" -PROJECT_NAME = "test_project" -TEST_MASSAGE = "test_message" -TEST_LEVEL = "DEBUG" -TEST_BATCH_SIZE = 5 -TEST_ATTACHMENT_NAME = "test_file.bin" -TEST_ATTACHMENT_TYPE = "application/zip" - - -# noinspection PyUnresolvedReferences -def test_log_batch_send_by_length(): - session = mock.Mock() - log_manager = LogManager( - RP_URL, session, API_VERSION, TEST_LAUNCH_ID, PROJECT_NAME, max_entry_number=TEST_BATCH_SIZE, verify_ssl=False - ) - log_manager._worker = mock.Mock() - - for _ in range(TEST_BATCH_SIZE): - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) - - assert log_manager._worker.send.call_count == 1 - batch = log_manager._worker.send.call_args[0][0] - assert isinstance(batch, HttpRequest) - assert len(json.loads(batch.files[0][1][1])) == 5 - assert "post" in session._mock_children - assert len(log_manager._batch) == 0 - assert log_manager._payload_size == helpers.TYPICAL_MULTIPART_FOOTER_LENGTH - - -# noinspection PyUnresolvedReferences -def test_log_batch_send_url_format(): - session = mock.Mock() - log_manager = LogManager( - RP_URL + "/", - session, - API_VERSION, - TEST_LAUNCH_ID, - PROJECT_NAME, - max_entry_number=TEST_BATCH_SIZE, - verify_ssl=False, - ) - log_manager._worker = mock.Mock() - - for _ in range(TEST_BATCH_SIZE): - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) - - assert log_manager._worker.send.call_count == 1 - batch = log_manager._worker.send.call_args[0][0] - assert isinstance(batch, HttpRequest) - assert batch.url == RP_URL + "/api/" + API_VERSION + "/" + PROJECT_NAME + "/log" - - -# noinspection PyUnresolvedReferences -def test_log_batch_not_send_by_length(): - session = mock.Mock() - log_manager = LogManager( - RP_URL, session, API_VERSION, TEST_LAUNCH_ID, PROJECT_NAME, max_entry_number=TEST_BATCH_SIZE, verify_ssl=False - ) - log_manager._worker = mock.Mock() - - for _ in range(TEST_BATCH_SIZE - 1): - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) - - assert log_manager._worker.send.call_count == 0 - assert "post" not in session._mock_children - assert len(log_manager._batch) == 4 - assert log_manager._payload_size > helpers.TYPICAL_MULTIPART_FOOTER_LENGTH - 
- -# noinspection PyUnresolvedReferences -def test_log_batch_send_by_stop(): - session = mock.Mock() - log_manager = LogManager( - RP_URL, session, API_VERSION, TEST_LAUNCH_ID, PROJECT_NAME, max_entry_number=TEST_BATCH_SIZE, verify_ssl=False - ) - log_manager._worker = mock.Mock() - - for _ in range(TEST_BATCH_SIZE - 1): - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) - log_manager.stop() - - assert log_manager._worker.send.call_count == 1 - batch = log_manager._worker.send.call_args[0][0] - assert isinstance(batch, HttpRequest) - assert len(json.loads(batch.files[0][1][1])) == 4 - assert "post" in session._mock_children - assert len(log_manager._batch) == 0 - assert log_manager._payload_size == helpers.TYPICAL_MULTIPART_FOOTER_LENGTH - - -# noinspection PyUnresolvedReferences -def test_log_batch_not_send_by_size(): - session = mock.Mock() - log_manager = LogManager( - RP_URL, session, API_VERSION, TEST_LAUNCH_ID, PROJECT_NAME, max_entry_number=TEST_BATCH_SIZE, verify_ssl=False - ) - log_manager._worker = mock.Mock() - - headers_size = helpers.TYPICAL_MULTIPART_FOOTER_LENGTH - len( - helpers.TYPICAL_FILE_PART_HEADER.format(TEST_ATTACHMENT_NAME, TEST_ATTACHMENT_TYPE) - ) - attachment_size = MAX_LOG_BATCH_PAYLOAD_SIZE - headers_size - 1024 - random_byte_array = bytearray(os.urandom(attachment_size)) - attachment = {"name": TEST_ATTACHMENT_NAME, "content": random_byte_array, "content_type": TEST_ATTACHMENT_TYPE} - - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID, attachment=attachment) - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) - - assert log_manager._worker.send.call_count == 0 - assert "post" not in session._mock_children - assert len(log_manager._batch) == 2 - assert log_manager._payload_size > MAX_LOG_BATCH_PAYLOAD_SIZE - 1024 - assert log_manager._payload_size < MAX_LOG_BATCH_PAYLOAD_SIZE - - -# noinspection PyUnresolvedReferences -def test_log_batch_send_by_size(): - session = mock.Mock() - log_manager = LogManager( - RP_URL, session, API_VERSION, TEST_LAUNCH_ID, PROJECT_NAME, max_entry_number=TEST_BATCH_SIZE, verify_ssl=False - ) - log_manager._worker = mock.Mock() - - random_byte_array = bytearray(os.urandom(MAX_LOG_BATCH_PAYLOAD_SIZE)) - attachment = {"name": TEST_ATTACHMENT_NAME, "content": random_byte_array, "content_type": TEST_ATTACHMENT_TYPE} - - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID, attachment=attachment) - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) - - assert log_manager._worker.send.call_count == 1 - batch = log_manager._worker.send.call_args[0][0] - assert isinstance(batch, HttpRequest) - assert len(json.loads(batch.files[0][1][1])) == 1 - assert "post" in session._mock_children - assert len(log_manager._batch) == 1 - assert log_manager._payload_size < helpers.TYPICAL_MULTIPART_FOOTER_LENGTH + 1024 - - -# noinspection PyUnresolvedReferences -def test_log_batch_triggers_previous_request_to_send(): - session = mock.Mock() - log_manager = LogManager( - RP_URL, session, API_VERSION, TEST_LAUNCH_ID, PROJECT_NAME, max_entry_number=TEST_BATCH_SIZE, verify_ssl=False - ) - log_manager._worker = mock.Mock() - - random_byte_array = bytearray(os.urandom(MAX_LOG_BATCH_PAYLOAD_SIZE)) - attachment = {"name": TEST_ATTACHMENT_NAME, "content": random_byte_array, "content_type": TEST_ATTACHMENT_TYPE} - - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID) 
- payload_size = log_manager._payload_size - assert payload_size < helpers.TYPICAL_MULTIPART_FOOTER_LENGTH + 1024 - - log_manager.log(helpers.timestamp(), TEST_MASSAGE, TEST_LEVEL, item_id=TEST_ITEM_ID, attachment=attachment) - - assert log_manager._worker.send.call_count == 1 - batch = log_manager._worker.send.call_args[0][0] - assert isinstance(batch, HttpRequest) - assert len(json.loads(batch.files[0][1][1])) == 1 - assert "post" in session._mock_children - assert len(log_manager._batch) == 1 - assert log_manager._payload_size > MAX_LOG_BATCH_PAYLOAD_SIZE diff --git a/tests/logs/test_rp_log_handler.py b/tests/logs/test_rp_log_handler.py index 6508553..9ab7fd1 100644 --- a/tests/logs/test_rp_log_handler.py +++ b/tests/logs/test_rp_log_handler.py @@ -76,7 +76,7 @@ def test_emit_simple(mocked_handle): @mock.patch("reportportal_client.logs.logging.Logger.handle") -def test_emit_custom_level(mocked_handle): +def test_emit_int_warn_level(mocked_handle): test_message = "test message" RPLogger("test_logger").log(30, test_message) record = mocked_handle.call_args[0][0] @@ -91,6 +91,60 @@ def test_emit_custom_level(mocked_handle): assert call_kwargs["level"] == "WARN" +@mock.patch("reportportal_client.logs.logging.Logger.handle") +def test_emit_custom_int_warn_level(mocked_handle): + test_message = "test message" + RPLogger("test_logger").log(35, test_message) + record = mocked_handle.call_args[0][0] + + mock_client = mock.Mock() + set_current(mock_client) + + log_handler = RPLogHandler() + log_handler.emit(record) + assert mock_client.log.call_count == 1 + call_args, call_kwargs = mock_client.log.call_args + assert call_kwargs["level"] == "WARN" + + +@mock.patch("reportportal_client.logs.logging.Logger.handle") +def test_emit_custom_int_custom_level(mocked_handle): + test_message = "test message" + custom_level_id = 35 + custom_level = "BIG_WARN" + RPLogger("test_logger").log(custom_level_id, test_message) + record = mocked_handle.call_args[0][0] + + mock_client = mock.Mock() + set_current(mock_client) + + custom_levels = {custom_level_id: custom_level} + log_handler = RPLogHandler(custom_levels=custom_levels) + log_handler.emit(record) + assert mock_client.log.call_count == 1 + call_args, call_kwargs = mock_client.log.call_args + assert call_kwargs["level"] == custom_level + + +@mock.patch("reportportal_client.logs.logging.Logger.handle") +def test_emit_custom_int_custom_level_override(mocked_handle): + test_message = "test message" + custom_level_id = 30 + custom_level = "BIG_WARN" + RPLogger("test_logger").log(custom_level_id, test_message) + record = mocked_handle.call_args[0][0] + + mock_client = mock.Mock() + set_current(mock_client) + + custom_levels = {custom_level_id: custom_level} + log_handler = RPLogHandler(custom_levels=custom_levels) + log_handler.emit(record) + assert mock_client.log.call_count == 1 + call_args, call_kwargs = mock_client.log.call_args + assert call_kwargs["level"] == custom_level + + @mock.patch("reportportal_client.logs.logging.Logger.handle") def test_emit_null_client_no_error(mocked_handle): test_message = "test message" diff --git a/tox.ini b/tox.ini index 543dc7a..7304908 100644 --- a/tox.ini +++ b/tox.ini @@ -2,12 +2,12 @@ isolated_build = True envlist = pep - py38 py39 py310 py311 py312 py313 + py314 [testenv] deps = @@ -26,9 +26,9 @@ commands = pre-commit run --all-files --show-diff-on-failure [gh-actions] python = - 3.8: py38 3.9: py39 3.10: pep, py310 3.11: py311 3.12: py312 3.13: py313 + 3.14: py314
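
For reference, the new `custom_levels` argument exercised by the tests above maps extra numeric levels onto ReportPortal level names, merging over the defaults in `LOG_LEVEL_MAPPING`. A minimal sketch follows, assuming an active ReportPortal client as in the earlier sketch; the level number 35 and the name `BIG_WARN` mirror the test data and are otherwise arbitrary.

```python
import logging

from reportportal_client.logs import RPLogger, RPLogHandler

logging.setLoggerClass(RPLogger)
logger = logging.getLogger("custom.levels.demo")

# 35 sits between WARNING (30) and ERROR (40); without the custom mapping
# it would be reported as WARN, the closest default level below it.
logging.addLevelName(35, "BIG_WARN")
handler = RPLogHandler(custom_levels={35: "BIG_WARN"})
logger.addHandler(handler)

logger.log(35, "sent to ReportPortal with level BIG_WARN")
```

Entries for existing numeric levels (for example 30) take precedence over the defaults, as the override test above demonstrates.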