2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -34,7 +34,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ]
python-version: [ '3.9', '3.10', '3.11', '3.12', '3.13', '3.14' ]
steps:
- name: Checkout repository
uses: actions/checkout@v4
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,14 @@
# Changelog

## [Unreleased]
### Added
- Official `Python 3.14` support, by @HardNorth
- Custom log level support in `RPLogHandler` class, by @HardNorth
### Removed
- `Python 3.7` support, by @HardNorth
- Deprecated `log_manager.py` module, by @HardNorth

## [5.6.7]
### Changed
- `log_batch_payload_size` was renamed to `log_batch_payload_limit` as it was originally supposed, by @HardNorth

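For the custom log level entry above, a hedged sketch of what the feature enables; the `level` argument is assumed to behave like the standard `logging.Handler` level filter, and the actual new API may differ:

```python
# Hedged sketch only: assumes RPLogHandler accepts a custom level the same way
# any logging.Handler does; the exact parameter name is an assumption.
import logging

from reportportal_client.logs import RPLogHandler

logger = logging.getLogger("my_tests")
logger.setLevel(logging.DEBUG)
# Only records at WARNING and above would be forwarded to ReportPortal.
logger.addHandler(RPLogHandler(level=logging.WARNING))
```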
14 changes: 6 additions & 8 deletions reportportal_client/__init__.py
@@ -15,15 +15,15 @@

import sys
import warnings
from typing import Optional, Tuple, TypedDict, Union
from typing import Optional, TypedDict, Union

# noinspection PyUnreachableCode
if sys.version_info >= (3, 11):
from typing import Unpack
else:
from typing_extensions import Unpack

import aenum
import aenum # type: ignore

# noinspection PyProtectedMember
from reportportal_client._internal.local import current, set_current
@@ -43,9 +43,6 @@ class ClientType(aenum.Enum):


class _ClientOptions(TypedDict, total=False):
client_type: ClientType
endpoint: str
project: str
api_key: Optional[str]
# OAuth 2.0 parameters
oauth_uri: Optional[str]
Expand All @@ -60,7 +57,7 @@ class _ClientOptions(TypedDict, total=False):
verify_ssl: Union[bool, str]
retries: int
max_pool_size: int
http_timeout: Union[float, Tuple[float, float]]
http_timeout: Union[float, tuple[float, float]]
mode: str
launch_uuid_print: bool
print_output: OutputType
@@ -122,15 +119,16 @@ def create_client(
:return: ReportPortal Client instance.
"""
my_kwargs = kwargs.copy()
if "log_batch_payload_size" in my_kwargs:
if "log_batch_payload_size" in my_kwargs: # type: ignore
warnings.warn(
message="Your agent is using `log_batch_payload_size` property which was introduced by mistake. "
"The real property name is `log_batch_payload_limit`. Please consider Agent version update.",
category=DeprecationWarning,
stacklevel=2,
)
payload_size = my_kwargs.pop("log_batch_payload_size") # type: ignore
if "log_batch_payload_limit" not in my_kwargs:
my_kwargs["log_batch_payload_limit"] = my_kwargs.pop("log_batch_payload_size")
my_kwargs["log_batch_payload_limit"] = payload_size

if client_type is ClientType.SYNC:
return RPClient(endpoint, project, **my_kwargs)
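To illustrate the deprecation shim above, a minimal usage sketch of how an agent would call `create_client` with the new parameter name; the endpoint, project and API key values are placeholders:

```python
# Minimal usage sketch; URL, project name, API key and limit are placeholders.
from reportportal_client import ClientType, create_client

client = create_client(
    ClientType.SYNC,
    "https://demo.reportportal.io",
    "default_personal",
    api_key="<api key>",
    log_batch_payload_limit=65_000_000,  # the correctly named option
)
# Passing the old `log_batch_payload_size` still works, but now emits a
# DeprecationWarning and is translated to `log_batch_payload_limit`.
```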
28 changes: 16 additions & 12 deletions reportportal_client/_internal/aio/http.py
@@ -24,9 +24,9 @@
import asyncio
import sys
from types import TracebackType
from typing import Any, Callable, Coroutine, Optional, Type, Union
from typing import Any, Callable, Coroutine, Optional, Union

from aenum import Enum
from aenum import Enum # type: ignore
from aiohttp import ClientResponse, ClientResponseError
from aiohttp import ClientSession as AioHttpClientSession
from aiohttp import ServerConnectionError
@@ -77,31 +77,33 @@ def __init__(
self.__retry_number = max_retry_number
self.__retry_delay = base_retry_delay

async def __nothing(self):
async def __nothing(self) -> None:
pass

def __sleep(self, retry_num: int, retry_factor: int) -> Coroutine:
if retry_num > 0: # don't wait at the first retry attempt
def __sleep(self, retry_num: int, retry_factor: Optional[int]) -> Coroutine:
if retry_num > 0 and retry_factor is not None: # don't wait at the first retry attempt
delay = (((retry_factor * self.__retry_delay) * 1000) ** retry_num) / 1000
return asyncio.sleep(delay)
else:
return self.__nothing()

async def __request(self, method: Callable, url, **kwargs: Any) -> ClientResponse:
async def __request(
self, method: Callable[..., Coroutine[Any, Any, ClientResponse]], url: str, **kwargs: Any
) -> ClientResponse:
"""Make a request and retry if necessary.

The method retries requests depending on error class and retry number. For no-retry errors, such as
400 Bad Request it just returns result, for cases where it's reasonable to retry it does it in
exponential manner.
"""
result = None
result: Optional[ClientResponse] = None
exceptions = []

for i in range(self.__retry_number + 1): # add one for the first attempt, which is not a retry
retry_factor = None
retry_factor: Optional[int] = None
if result is not None:
# Release previous result to return connection to pool
await result.release()
result.release()
try:
result = await method(url, **kwargs)
except Exception as exc:
@@ -136,6 +138,8 @@ async def __request(self, method: Callable, url, **kwargs: Any) -> ClientRespons
raise exceptions[-1]
else:
raise exceptions[0]
if result is None:
raise IOError("Request failed without exceptions")
return result

def get(self, url: str, *, allow_redirects: bool = True, **kwargs: Any) -> Coroutine[Any, Any, ClientResponse]:
Expand All @@ -150,7 +154,7 @@ def put(self, url: str, *, data: Any = None, **kwargs: Any) -> Coroutine[Any, An
"""Perform HTTP PUT request."""
return self.__request(self._client.put, url, data=data, **kwargs)

def close(self) -> Coroutine:
def close(self) -> Coroutine[None, None, None]:
"""Gracefully close internal aiohttp.ClientSession class instance."""
return self._client.close()

@@ -160,7 +164,7 @@ async def __aenter__(self) -> "RetryingClientSession":

async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
@@ -241,7 +245,7 @@ async def __aenter__(self) -> "ClientSession":

async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
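As context for the `retry_factor` changes above, a rough, self-contained sketch of the exponential back-off formula used in `__sleep`; the `base_delay` and `retry_factor` values below are illustrative assumptions, not library defaults:

```python
# Standalone sketch of the back-off formula from __sleep above.
def retry_delay(retry_num: int, retry_factor: int, base_delay: float) -> float:
    return (((retry_factor * base_delay) * 1000) ** retry_num) / 1000


# With retry_factor=2 and base_delay=0.005 this yields 0.01 s, 0.1 s, 1.0 s;
# retry_num=0 (the first attempt) is skipped by the guard in __sleep itself.
for attempt in range(1, 4):
    print(attempt, retry_delay(attempt, retry_factor=2, base_delay=0.005))
```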
12 changes: 6 additions & 6 deletions reportportal_client/_internal/aio/tasks.py
@@ -17,7 +17,7 @@
import sys
import time
from asyncio import Future
from typing import Any, Awaitable, Coroutine, Generator, Generic, List, Optional, TypeVar, Union
from typing import Any, Awaitable, Coroutine, Generator, Generic, Optional, TypeVar, Union

from reportportal_client.aio.tasks import BlockingOperationError, Task

@@ -142,7 +142,7 @@ def __call__(
class TriggerTaskBatcher(Generic[_T]):
"""Batching class which compile its batches by object number or by passed time."""

__task_list: List[_T]
__task_list: list[_T]
__last_run_time: float
__trigger_num: int
__trigger_interval: float
@@ -170,7 +170,7 @@ def __ready_to_run(self) -> bool:
return True
return False

def append(self, value: _T) -> Optional[List[_T]]:
def append(self, value: _T) -> Optional[list[_T]]:
"""Add an object to internal batch and return the batch if it's triggered.

:param value: an object to add to the batch
@@ -184,7 +184,7 @@ def append(self, value: _T) -> Optional[List[_T]]:
self.__task_list = []
return tasks

def flush(self) -> Optional[List[_T]]:
def flush(self) -> Optional[list[_T]]:
"""Immediately return everything what's left in the internal batch.

:return: a batch or None
@@ -200,7 +200,7 @@ def flush(self) -> Optional[List[_T]]:
class BackgroundTaskList(Generic[_T]):
"""Task list class which collects Tasks into internal batch and removes when they complete."""

__task_list: List[_T]
__task_list: list[_T]

def __init__(self):
"""Initialize an instance of the Batcher."""
@@ -222,7 +222,7 @@ def append(self, value: _T) -> None:
self.__remove_finished()
self.__task_list.append(value)

def flush(self) -> Optional[List[_T]]:
def flush(self) -> Optional[list[_T]]:
"""Immediately return everything what's left unfinished in the internal batch.

:return: a batch or None
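A hedged usage sketch of the batching contract shown above; the constructor keyword names are assumed to mirror the private `__trigger_num`/`__trigger_interval` fields and may not match the real signature:

```python
# Hedged sketch: trigger_num / trigger_interval keyword names are assumptions.
from reportportal_client._internal.aio.tasks import TriggerTaskBatcher

batcher: TriggerTaskBatcher[int] = TriggerTaskBatcher(trigger_num=3, trigger_interval=10.0)

for item in range(5):
    batch = batcher.append(item)  # returns a list only once a trigger fires
    if batch is not None:
        print("triggered batch:", batch)

print("leftover:", batcher.flush())  # whatever never reached a trigger, or None
```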
4 changes: 2 additions & 2 deletions reportportal_client/_internal/http.py
@@ -15,7 +15,7 @@
"""This module designed to help with synchronous HTTP request/response handling."""

from types import TracebackType
from typing import Any, Callable, Optional, Type, Union
from typing import Any, Callable, Optional, Union

from requests import Response, Session
from requests.adapters import BaseAdapter
@@ -104,7 +104,7 @@ def __enter__(self) -> "ClientSession":

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_type: Optional[type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
16 changes: 8 additions & 8 deletions reportportal_client/_internal/logs/batcher.py
@@ -15,7 +15,7 @@

import logging
import threading
from typing import Any, Dict, Generic, List, Optional, TypeVar
from typing import Any, Generic, Optional, TypeVar

from reportportal_client.core.rp_requests import AsyncRPRequestLog, RPRequestLog
from reportportal_client.logs import MAX_LOG_BATCH_PAYLOAD_SIZE, MAX_LOG_BATCH_SIZE
@@ -35,7 +35,7 @@ class LogBatcher(Generic[T_co]):
entry_num: int
payload_limit: int
_lock: threading.Lock
_batch: List[T_co]
_batch: list[T_co]
_payload_size: int

def __init__(self, entry_num=MAX_LOG_BATCH_SIZE, payload_limit=MAX_LOG_BATCH_PAYLOAD_SIZE) -> None:
@@ -50,7 +50,7 @@ def __init__(self, entry_num=MAX_LOG_BATCH_SIZE, payload_limit=MAX_LOG_BATCH_PAY
self._batch = []
self._payload_size = 0

def _append(self, size: int, log_req: RPRequestLog) -> Optional[List[RPRequestLog]]:
def _append(self, size: int, log_req: RPRequestLog) -> Optional[list[RPRequestLog]]:
with self._lock:
if self._payload_size + size >= self.payload_limit:
if len(self._batch) > 0:
@@ -68,23 +68,23 @@ def _append(self, size: int, log_req: RPRequestLog) -> Optional[List[RPRequestLo
self._payload_size = 0
return batch

def append(self, log_req: RPRequestLog) -> Optional[List[RPRequestLog]]:
def append(self, log_req: RPRequestLog) -> Optional[list[RPRequestLog]]:
"""Add a log request object to internal batch and return the batch if it's full.

:param log_req: log request object
:return: a batch or None
"""
return self._append(log_req.multipart_size, log_req)

async def append_async(self, log_req: AsyncRPRequestLog) -> Optional[List[AsyncRPRequestLog]]:
async def append_async(self, log_req: AsyncRPRequestLog) -> Optional[list[AsyncRPRequestLog]]:
"""Add a log request object to internal batch and return the batch if it's full.

:param log_req: log request object
:return: a batch or None
"""
return self._append(await log_req.multipart_size, log_req)

def flush(self) -> Optional[List[T_co]]:
def flush(self) -> Optional[list[T_co]]:
"""Immediately return everything what's left in the internal batch.

:return: a batch or None
Expand All @@ -99,7 +99,7 @@ def flush(self) -> Optional[List[T_co]]:
self._payload_size = 0
return batch

def __getstate__(self) -> Dict[str, Any]:
def __getstate__(self) -> dict[str, Any]:
"""Control object pickling and return object fields as Dictionary.

:return: object state dictionary
@@ -110,7 +110,7 @@ def __getstate__(self) -> Dict[str, Any]:
del state["_lock"]
return state

def __setstate__(self, state: Dict[str, Any]) -> None:
def __setstate__(self, state: dict[str, Any]) -> None:
"""Control object pickling, receives object state as Dictionary.

:param dict state: object state dictionary
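A minimal sketch of why `__getstate__`/`__setstate__` above exclude the lock: `threading.Lock` objects are not picklable, so the batcher drops `_lock` from its state and is expected to restore a fresh one on unpickling (the `__setstate__` body is not shown in this hunk, so that part is an assumption):

```python
# Sketch under the assumption that __setstate__ recreates `_lock`, as its
# docstring suggests; dropping the lock is what makes pickling possible.
import pickle

from reportportal_client._internal.logs.batcher import LogBatcher

batcher = LogBatcher()              # default entry_num / payload_limit
data = pickle.dumps(batcher)        # would fail if `_lock` stayed in the state
restored = pickle.loads(data)
print(type(restored._lock))         # expected: a freshly created lock object
```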
17 changes: 9 additions & 8 deletions reportportal_client/_internal/services/client_id.py
@@ -17,6 +17,7 @@
import io
import logging
import os
from typing import Iterable, Optional
from uuid import uuid4

from .constants import CLIENT_ID_PROPERTY, RP_FOLDER_PATH, RP_PROPERTIES_FILE_PATH
@@ -32,35 +33,35 @@ def __preprocess_file(self, fp):
content = "[" + self.DEFAULT_SECTION + "]\n" + fp.read()
return io.StringIO(content)

def read(self, filenames, encoding=None):
def read_file(self, filenames: Iterable[str], source: Optional[str] = None) -> None:
if isinstance(filenames, str):
filenames = [filenames]
for filename in filenames:
with open(filename, "r") as fp:
preprocessed_fp = self.__preprocess_file(fp)
self.read_file(preprocessed_fp, filename)
super().read_file(preprocessed_fp, filename)

def write(self, fp, space_around_delimiters=True):
def write(self, fp, space_around_delimiters: bool = True) -> None:
for key, value in self.items(self.DEFAULT_SECTION):
delimiter = " = " if space_around_delimiters else "="
fp.write("{}{}{}\n".format(key, delimiter, value))


def __read_config():
def __read_config() -> configparser.ConfigParser:
config = __NoSectionConfigParser()
if os.path.exists(RP_PROPERTIES_FILE_PATH):
config.read(RP_PROPERTIES_FILE_PATH)
config.read_file(RP_PROPERTIES_FILE_PATH)
return config


def _read_client_id():
def _read_client_id() -> Optional[str]:
config = __read_config()
if config.has_option(__NoSectionConfigParser.DEFAULT_SECTION, CLIENT_ID_PROPERTY):
return config.get(__NoSectionConfigParser.DEFAULT_SECTION, CLIENT_ID_PROPERTY)
return None


def _store_client_id(client_id):
def _store_client_id(client_id: str) -> None:
config = __read_config()
if not os.path.exists(RP_FOLDER_PATH):
os.makedirs(RP_FOLDER_PATH)
Expand All @@ -69,7 +70,7 @@ def _store_client_id(client_id):
config.write(fp)


def get_client_id():
def get_client_id() -> str:
"""Return unique client ID of the instance, generate new if not exists."""
client_id = None
try:
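For the module annotated above, a minimal sketch of its public entry point: `get_client_id()` reads the stored id from the properties file behind `RP_PROPERTIES_FILE_PATH` and generates and persists a new UUID when none exists:

```python
# Minimal sketch of the public helper; no arguments, stable across runs once
# the id has been written to the properties file.
from reportportal_client._internal.services.client_id import get_client_id

client_id = get_client_id()
print(client_id)  # a UUID string, generated on first call and persisted
```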
18 changes: 0 additions & 18 deletions reportportal_client/_internal/services/client_id.pyi

This file was deleted.
