diff --git a/Dockerfile--std-all.tmpl b/Dockerfile--std-all.tmpl index c41c5a06..d19f52a6 100644 --- a/Dockerfile--std-all.tmpl +++ b/Dockerfile--std-all.tmpl @@ -4,11 +4,6 @@ ARG PYTHON_VERSION # --------------------------------------------- base1 FROM postgres:${PG_VERSION}-alpine as base1 -# --------------------------------------------- base2_with_python-2 -FROM base1 as base2_with_python-2 -RUN apk add --no-cache curl python2 python2-dev build-base musl-dev linux-headers py-virtualenv py-pip -ENV PYTHON_VERSION=2 - # --------------------------------------------- base2_with_python-3 FROM base1 as base2_with_python-3 RUN apk add --no-cache curl python3 python3-dev build-base musl-dev linux-headers py-virtualenv diff --git a/Dockerfile--std.tmpl b/Dockerfile--std.tmpl index 91886ede..67aa30b4 100644 --- a/Dockerfile--std.tmpl +++ b/Dockerfile--std.tmpl @@ -4,11 +4,6 @@ ARG PYTHON_VERSION # --------------------------------------------- base1 FROM postgres:${PG_VERSION}-alpine as base1 -# --------------------------------------------- base2_with_python-2 -FROM base1 as base2_with_python-2 -RUN apk add --no-cache curl python2 python2-dev build-base musl-dev linux-headers py-virtualenv py-pip -ENV PYTHON_VERSION=2 - # --------------------------------------------- base2_with_python-3 FROM base1 as base2_with_python-3 RUN apk add --no-cache curl python3 python3-dev build-base musl-dev linux-headers py-virtualenv diff --git a/README.md b/README.md index a3b854f8..defbc8b3 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ # testgres -PostgreSQL testing utility. Both Python 2.7 and 3.3+ are supported. +PostgreSQL testing utility. Python 3.8+ is supported. ## Installation diff --git a/__init__.py b/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/setup.py b/setup.py index 2c44b18f..0b209181 100755 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ setup( version='1.11.0', name='testgres', - packages=['testgres', 'testgres.operations'], + packages=['testgres', 'testgres.operations', 'testgres.impl'], description='Testing utility for PostgreSQL and its extensions', url='https://github.com/postgrespro/testgres', long_description=readme, diff --git a/testgres/backup.py b/testgres/backup.py index 388697b7..857c46d4 100644 --- a/testgres/backup.py +++ b/testgres/backup.py @@ -184,14 +184,19 @@ def spawn_replica(self, name=None, destroy=True, slot=None): """ # Build a new PostgresNode - with clean_on_error(self.spawn_primary(name=name, - destroy=destroy)) as node: + node = self.spawn_primary(name=name, destroy=destroy) + assert node is not None + try: # Assign it a master and a recovery file (private magic) node._assign_master(self.original_node) node._create_recovery_conf(username=self.username, slot=slot) + except: # noqa: E722 + # TODO: Pass 'final=True' ? + node.cleanup(release_resources=True) + raise - return node + return node def cleanup(self): """ diff --git a/testgres/cache.py b/testgres/cache.py index 3ac63326..499cce91 100644 --- a/testgres/cache.py +++ b/testgres/cache.py @@ -22,12 +22,16 @@ from .operations.os_ops import OsOperations -def cached_initdb(data_dir, logfile=None, params=None, os_ops: OsOperations = LocalOperations(), bin_path=None, cached=True): +def cached_initdb(data_dir, logfile=None, params=None, os_ops: OsOperations = None, bin_path=None, cached=True): """ Perform initdb or use cached node files. 
""" - assert os_ops is not None + assert os_ops is None or isinstance(os_ops, OsOperations) + + if os_ops is None: + os_ops = LocalOperations.get_single_instance() + assert isinstance(os_ops, OsOperations) def make_utility_path(name): diff --git a/testgres/config.py b/testgres/config.py index 67d467d3..55d52426 100644 --- a/testgres/config.py +++ b/testgres/config.py @@ -50,8 +50,9 @@ class GlobalConfig(object): _cached_initdb_dir = None """ underlying class attribute for cached_initdb_dir property """ - os_ops = LocalOperations() + os_ops = LocalOperations.get_single_instance() """ OsOperation object that allows work on remote host """ + @property def cached_initdb_dir(self): """ path to a temp directory for cached initdb. """ diff --git a/testgres/impl/port_manager__generic.py b/testgres/impl/port_manager__generic.py new file mode 100755 index 00000000..a51af2bd --- /dev/null +++ b/testgres/impl/port_manager__generic.py @@ -0,0 +1,64 @@ +from ..operations.os_ops import OsOperations + +from ..port_manager import PortManager +from ..exceptions import PortForException + +import threading +import random +import typing + + +class PortManager__Generic(PortManager): + _os_ops: OsOperations + _guard: object + # TODO: is there better to use bitmap fot _available_ports? + _available_ports: typing.Set[int] + _reserved_ports: typing.Set[int] + + def __init__(self, os_ops: OsOperations): + assert os_ops is not None + assert isinstance(os_ops, OsOperations) + self._os_ops = os_ops + self._guard = threading.Lock() + self._available_ports: typing.Set[int] = set(range(1024, 65535)) + self._reserved_ports: typing.Set[int] = set() + + def reserve_port(self) -> int: + assert self._guard is not None + assert type(self._available_ports) == set # noqa: E721t + assert type(self._reserved_ports) == set # noqa: E721 + + with self._guard: + t = tuple(self._available_ports) + assert len(t) == len(self._available_ports) + sampled_ports = random.sample(t, min(len(t), 100)) + t = None + + for port in sampled_ports: + assert not (port in self._reserved_ports) + assert port in self._available_ports + + if not self._os_ops.is_port_free(port): + continue + + self._reserved_ports.add(port) + self._available_ports.discard(port) + assert port in self._reserved_ports + assert not (port in self._available_ports) + return port + + raise PortForException("Can't select a port.") + + def release_port(self, number: int) -> None: + assert type(number) == int # noqa: E721 + + assert self._guard is not None + assert type(self._reserved_ports) == set # noqa: E721 + + with self._guard: + assert number in self._reserved_ports + assert not (number in self._available_ports) + self._available_ports.add(number) + self._reserved_ports.discard(number) + assert not (number in self._reserved_ports) + assert number in self._available_ports diff --git a/testgres/impl/port_manager__this_host.py b/testgres/impl/port_manager__this_host.py new file mode 100755 index 00000000..0d56f356 --- /dev/null +++ b/testgres/impl/port_manager__this_host.py @@ -0,0 +1,33 @@ +from ..port_manager import PortManager + +from .. 
import utils + +import threading + + +class PortManager__ThisHost(PortManager): + sm_single_instance: PortManager = None + sm_single_instance_guard = threading.Lock() + + @staticmethod + def get_single_instance() -> PortManager: + assert __class__ == PortManager__ThisHost + assert __class__.sm_single_instance_guard is not None + + if __class__.sm_single_instance is not None: + assert type(__class__.sm_single_instance) == __class__ # noqa: E721 + return __class__.sm_single_instance + + with __class__.sm_single_instance_guard: + if __class__.sm_single_instance is None: + __class__.sm_single_instance = __class__() + assert __class__.sm_single_instance is not None + assert type(__class__.sm_single_instance) == __class__ # noqa: E721 + return __class__.sm_single_instance + + def reserve_port(self) -> int: + return utils.reserve_port() + + def release_port(self, number: int) -> None: + assert type(number) == int # noqa: E721 + return utils.release_port(number) diff --git a/testgres/node.py b/testgres/node.py index 3a294044..9a2f4e77 100644 --- a/testgres/node.py +++ b/testgres/node.py @@ -84,8 +84,8 @@ InvalidOperationException from .port_manager import PortManager -from .port_manager import PortManager__ThisHost -from .port_manager import PortManager__Generic +from .impl.port_manager__this_host import PortManager__ThisHost +from .impl.port_manager__generic import PortManager__Generic from .logger import TestgresLogger @@ -93,6 +93,8 @@ from .standby import First +from . import utils + from .utils import \ PgVer, \ eprint, \ @@ -107,7 +109,6 @@ from .operations.os_ops import ConnectionParams from .operations.os_ops import OsOperations from .operations.local_ops import LocalOperations -from .operations.remote_ops import RemoteOperations InternalError = pglib.InternalError ProgrammingError = pglib.ProgrammingError @@ -151,7 +152,7 @@ def __init__(self, name=None, base_dir=None, port: typing.Optional[int] = None, - conn_params: ConnectionParams = ConnectionParams(), + conn_params: ConnectionParams = None, bin_dir=None, prefix=None, os_ops: typing.Optional[OsOperations] = None, @@ -171,11 +172,15 @@ def __init__(self, assert os_ops is None or isinstance(os_ops, OsOperations) assert port_manager is None or isinstance(port_manager, PortManager) + if conn_params is not None: + assert type(conn_params) == ConnectionParams # noqa: E721 + + raise InvalidOperationException("conn_params is deprecated, please use os_ops parameter instead.") + # private if os_ops is None: - self._os_ops = __class__._get_os_ops(conn_params) + self._os_ops = __class__._get_os_ops() else: - assert conn_params is None assert isinstance(os_ops, OsOperations) self._os_ops = os_ops pass @@ -200,11 +205,14 @@ def __init__(self, self._should_free_port = False self._port_manager = None else: - if port_manager is not None: + if port_manager is None: + self._port_manager = __class__._get_port_manager(self._os_ops) + elif os_ops is None: + raise InvalidOperationException("When port_manager is not None you have to define os_ops, too.") + else: assert isinstance(port_manager, PortManager) + assert self._os_ops is os_ops self._port_manager = port_manager - else: - self._port_manager = __class__._get_port_manager(self._os_ops) assert self._port_manager is not None assert isinstance(self._port_manager, PortManager) @@ -231,8 +239,6 @@ def __enter__(self): return self def __exit__(self, type, value, traceback): - self.free_port() - # NOTE: Ctrl+C does not count! 
got_exception = type is not None and type != KeyboardInterrupt @@ -246,6 +252,8 @@ def __exit__(self, type, value, traceback): else: self._try_shutdown(attempts) + self._release_resources() + def __repr__(self): return "{}(name='{}', port={}, base_dir='{}')".format( self.__class__.__name__, @@ -255,24 +263,22 @@ def __repr__(self): ) @staticmethod - def _get_os_ops(conn_params: ConnectionParams) -> OsOperations: + def _get_os_ops() -> OsOperations: if testgres_config.os_ops: return testgres_config.os_ops - assert type(conn_params) == ConnectionParams # noqa: E721 - - if conn_params.ssh_key: - return RemoteOperations(conn_params) - - return LocalOperations(conn_params) + return LocalOperations.get_single_instance() @staticmethod def _get_port_manager(os_ops: OsOperations) -> PortManager: assert os_ops is not None assert isinstance(os_ops, OsOperations) - if isinstance(os_ops, LocalOperations): - return PortManager__ThisHost() + if os_ops is LocalOperations.get_single_instance(): + assert utils._old_port_manager is not None + assert type(utils._old_port_manager) == PortManager__Generic # noqa: E721 + assert utils._old_port_manager._os_ops is os_ops + return PortManager__ThisHost.get_single_instance() # TODO: Throw the exception "Please define a port manager." ? return PortManager__Generic(os_ops) @@ -294,7 +300,6 @@ def clone_with_new_name_and_base_dir(self, name: str, base_dir: str): node = PostgresNode( name=name, base_dir=base_dir, - conn_params=None, bin_dir=self._bin_dir, prefix=self._prefix, os_ops=self._os_ops, @@ -663,6 +668,9 @@ def _try_shutdown(self, max_attempts, with_force=False): ps_output, ps_command) + def _release_resources(self): + self.free_port() + @staticmethod def _throw_bugcheck__unexpected_result_of_ps(result, cmd): assert type(result) == str # noqa: E721 @@ -776,28 +784,6 @@ def _collect_special_files(self): return result - def _collect_log_files(self): - # dictionary of log files + size in bytes - - files = [ - self.pg_log_file - ] # yapf: disable - - result = {} - - for f in files: - # skip missing files - if not self.os_ops.path_exists(f): - continue - - file_size = self.os_ops.get_file_size(f) - assert type(file_size) == int # noqa: E721 - assert file_size >= 0 - - result[f] = file_size - - return result - def init(self, initdb_params=None, cached=True, **kwargs): """ Perform initdb for this node. @@ -813,10 +799,13 @@ def init(self, initdb_params=None, cached=True, **kwargs): """ # initialize this PostgreSQL node + assert self._os_ops is not None + assert isinstance(self._os_ops, OsOperations) + cached_initdb( data_dir=self.data_dir, logfile=self.utils_log_file, - os_ops=self.os_ops, + os_ops=self._os_ops, params=initdb_params, bin_path=self.bin_dir, cached=False) @@ -1051,22 +1040,6 @@ def slow_start(self, replica=False, dbname='template1', username=None, max_attem OperationalError}, max_attempts=max_attempts) - def _detect_port_conflict(self, log_files0, log_files1): - assert type(log_files0) == dict # noqa: E721 - assert type(log_files1) == dict # noqa: E721 - - for file in log_files1.keys(): - read_pos = 0 - - if file in log_files0.keys(): - read_pos = log_files0[file] # the previous size - - file_content = self.os_ops.read_binary(file, read_pos) - file_content_s = file_content.decode() - if 'Is another postmaster already running on port' in file_content_s: - return True - return False - def start(self, params=[], wait=True, exec_env=None): """ Starts the PostgreSQL node using pg_ctl if node has not been started. 
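# ---------------------------------------------------------------------------
# Illustrative usage sketch (hedged, not part of the patch): with conn_params
# now rejected by PostgresNode.__init__, a remote node is built from an
# explicit OsOperations object plus a matching PortManager, per the new rule
# "When port_manager is not None you have to define os_ops, too". The
# ConnectionParams keyword names (host, username, ssh_key) and the host values
# are assumptions for illustration only.
from testgres import PostgresNode
from testgres.operations.os_ops import ConnectionParams
from testgres.operations.remote_ops import RemoteOperations
from testgres.impl.port_manager__generic import PortManager__Generic

conn_params = ConnectionParams(host="10.0.0.5", username="postgres", ssh_key="~/.ssh/id_rsa")
os_ops = RemoteOperations(conn_params)
port_manager = PortManager__Generic(os_ops)   # reserves free ports on the remote host

with PostgresNode(name="node_remote", os_ops=os_ops, port_manager=port_manager) as node:
    node.init().start()                        # __exit__ now calls _release_resources()
    print(node.psql(query="select 1", dbname="postgres"))
# ---------------------------------------------------------------------------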
@@ -1126,8 +1099,7 @@ def LOCAL__raise_cannot_start_node__std(from_exception): assert isinstance(self._port_manager, PortManager) assert __class__._C_MAX_START_ATEMPTS > 1 - log_files0 = self._collect_log_files() - assert type(log_files0) == dict # noqa: E721 + log_reader = PostgresNodeLogReader(self, from_beginnig=False) nAttempt = 0 timeout = 1 @@ -1143,11 +1115,11 @@ def LOCAL__raise_cannot_start_node__std(from_exception): if nAttempt == __class__._C_MAX_START_ATEMPTS: LOCAL__raise_cannot_start_node(e, "Cannot start node after multiple attempts.") - log_files1 = self._collect_log_files() - if not self._detect_port_conflict(log_files0, log_files1): + is_it_port_conflict = PostgresNodeUtils.delect_port_conflict(log_reader) + + if not is_it_port_conflict: LOCAL__raise_cannot_start_node__std(e) - log_files0 = log_files1 logging.warning( "Detected a conflict with using the port {0}. Trying another port after a {1}-second sleep...".format(self._port, timeout) ) @@ -1340,7 +1312,7 @@ def free_port(self): self._port = None self._port_manager.release_port(port) - def cleanup(self, max_attempts=3, full=False): + def cleanup(self, max_attempts=3, full=False, release_resources=False): """ Stop node if needed and remove its data/logs directory. NOTE: take a look at TestgresConfig.node_cleanup_full. @@ -1363,6 +1335,9 @@ def cleanup(self, max_attempts=3, full=False): self.os_ops.rmdirs(rm_dir, ignore_errors=False) + if release_resources: + self._release_resources() + return self @method_decorator(positional_args_hack(['dbname', 'query'])) @@ -1372,6 +1347,8 @@ def psql(self, dbname=None, username=None, input=None, + host: typing.Optional[str] = None, + port: typing.Optional[int] = None, **variables): """ Execute a query using psql. @@ -1382,6 +1359,8 @@ def psql(self, dbname: database name to connect to. username: database user name. input: raw input to be passed. + host: an explicit host of server. + port: an explicit port of server. **variables: vars to be set before execution. 
Returns: @@ -1393,6 +1372,10 @@ def psql(self, >>> psql(query='select 3', ON_ERROR_STOP=1) """ + assert host is None or type(host) == str # noqa: E721 + assert port is None or type(port) == int # noqa: E721 + assert type(variables) == dict # noqa: E721 + return self._psql( ignore_errors=True, query=query, @@ -1400,6 +1383,8 @@ def psql(self, dbname=dbname, username=username, input=input, + host=host, + port=port, **variables ) @@ -1411,7 +1396,11 @@ def _psql( dbname=None, username=None, input=None, + host: typing.Optional[str] = None, + port: typing.Optional[int] = None, **variables): + assert host is None or type(host) == str # noqa: E721 + assert port is None or type(port) == int # noqa: E721 assert type(variables) == dict # noqa: E721 # @@ -1424,10 +1413,21 @@ def _psql( else: raise Exception("Input data must be None or bytes.") + if host is None: + host = self.host + + if port is None: + port = self.port + + assert host is not None + assert port is not None + assert type(host) == str # noqa: E721 + assert type(port) == int # noqa: E721 + psql_params = [ self._get_bin_path("psql"), - "-p", str(self.port), - "-h", self.host, + "-p", str(port), + "-h", host, "-U", username or self.os_ops.username, "-d", dbname or default_dbname(), "-X", # no .psqlrc @@ -2153,9 +2153,177 @@ def _escape_config_value(value): return result +class PostgresNodeLogReader: + class LogInfo: + position: int + + def __init__(self, position: int): + self.position = position + + # -------------------------------------------------------------------- + class LogDataBlock: + _file_name: str + _position: int + _data: str + + def __init__( + self, + file_name: str, + position: int, + data: str + ): + assert type(file_name) == str # noqa: E721 + assert type(position) == int # noqa: E721 + assert type(data) == str # noqa: E721 + assert file_name != "" + assert position >= 0 + self._file_name = file_name + self._position = position + self._data = data + + @property + def file_name(self) -> str: + assert type(self._file_name) == str # noqa: E721 + assert self._file_name != "" + return self._file_name + + @property + def position(self) -> int: + assert type(self._position) == int # noqa: E721 + assert self._position >= 0 + return self._position + + @property + def data(self) -> str: + assert type(self._data) == str # noqa: E721 + return self._data + + # -------------------------------------------------------------------- + _node: PostgresNode + _logs: typing.Dict[str, LogInfo] + + # -------------------------------------------------------------------- + def __init__(self, node: PostgresNode, from_beginnig: bool): + assert node is not None + assert isinstance(node, PostgresNode) + assert type(from_beginnig) == bool # noqa: E721 + + self._node = node + + if from_beginnig: + self._logs = dict() + else: + self._logs = self._collect_logs() + + assert type(self._logs) == dict # noqa: E721 + return + + def read(self) -> typing.List[LogDataBlock]: + assert self._node is not None + assert isinstance(self._node, PostgresNode) + + cur_logs: typing.Dict[__class__.LogInfo] = self._collect_logs() + assert cur_logs is not None + assert type(cur_logs) == dict # noqa: E721 + + assert type(self._logs) == dict # noqa: E721 + + result = list() + + for file_name, cur_log_info in cur_logs.items(): + assert type(file_name) == str # noqa: E721 + assert type(cur_log_info) == __class__.LogInfo # noqa: E721 + + read_pos = 0 + + if file_name in self._logs.keys(): + prev_log_info = self._logs[file_name] + assert type(prev_log_info) == __class__.LogInfo # 
noqa: E721 + read_pos = prev_log_info.position # the previous size + + file_content_b = self._node.os_ops.read_binary(file_name, read_pos) + assert type(file_content_b) == bytes # noqa: E721 + + # + # A POTENTIAL PROBLEM: file_content_b may contain an incompleted UTF-8 symbol. + # + file_content_s = file_content_b.decode() + assert type(file_content_s) == str # noqa: E721 + + next_read_pos = read_pos + len(file_content_b) + + # It is a research/paranoja check. + # When we will process partial UTF-8 symbol, it must be adjusted. + assert cur_log_info.position <= next_read_pos + + cur_log_info.position = next_read_pos + + block = __class__.LogDataBlock( + file_name, + read_pos, + file_content_s + ) + + result.append(block) + + # A new check point + self._logs = cur_logs + + return result + + def _collect_logs(self) -> typing.Dict[LogInfo]: + assert self._node is not None + assert isinstance(self._node, PostgresNode) + + files = [ + self._node.pg_log_file + ] # yapf: disable + + result = dict() + + for f in files: + assert type(f) == str # noqa: E721 + + # skip missing files + if not self._node.os_ops.path_exists(f): + continue + + file_size = self._node.os_ops.get_file_size(f) + assert type(file_size) == int # noqa: E721 + assert file_size >= 0 + + result[f] = __class__.LogInfo(file_size) + + return result + + +class PostgresNodeUtils: + @staticmethod + def delect_port_conflict(log_reader: PostgresNodeLogReader) -> bool: + assert type(log_reader) == PostgresNodeLogReader # noqa: E721 + + blocks = log_reader.read() + assert type(blocks) == list # noqa: E721 + + for block in blocks: + assert type(block) == PostgresNodeLogReader.LogDataBlock # noqa: E721 + + if 'Is another postmaster already running on port' in block.data: + return True + + return False + + class NodeApp: - def __init__(self, test_path=None, nodes_to_cleanup=None, os_ops=LocalOperations()): + def __init__(self, test_path=None, nodes_to_cleanup=None, os_ops=None): + assert os_ops is None or isinstance(os_ops, OsOperations) + + if os_ops is None: + os_ops = LocalOperations.get_single_instance() + + assert isinstance(os_ops, OsOperations) + if test_path: if os.path.isabs(test_path): self.test_path = test_path diff --git a/testgres/operations/local_ops.py b/testgres/operations/local_ops.py index 74323bb8..ccf1ab82 100644 --- a/testgres/operations/local_ops.py +++ b/testgres/operations/local_ops.py @@ -10,6 +10,7 @@ import psutil import typing +import threading from ..exceptions import ExecUtilException from ..exceptions import InvalidOperationException @@ -28,6 +29,9 @@ class LocalOperations(OsOperations): + sm_single_instance: OsOperations = None + sm_single_instance_guard = threading.Lock() + def __init__(self, conn_params=None): if conn_params is None: conn_params = ConnectionParams() @@ -38,6 +42,22 @@ def __init__(self, conn_params=None): self.remote = False self.username = conn_params.username or getpass.getuser() + @staticmethod + def get_single_instance() -> OsOperations: + assert __class__ == LocalOperations + assert __class__.sm_single_instance_guard is not None + + if __class__.sm_single_instance is not None: + assert type(__class__.sm_single_instance) == __class__ # noqa: E721 + return __class__.sm_single_instance + + with __class__.sm_single_instance_guard: + if __class__.sm_single_instance is None: + __class__.sm_single_instance = __class__() + assert __class__.sm_single_instance is not None + assert type(__class__.sm_single_instance) == __class__ # noqa: E721 + return __class__.sm_single_instance + @staticmethod def 
_process_output(encoding, temp_file_path): """Process the output of a command from a temporary file.""" @@ -230,6 +250,10 @@ def makedirs(self, path, remove_existing=False): except FileExistsError: pass + def makedir(self, path: str): + assert type(path) == str # noqa: E721 + os.mkdir(path) + # [2025-02-03] Old name of parameter attempts is "retries". def rmdirs(self, path, ignore_errors=True, attempts=3, delay=1): """ @@ -273,6 +297,10 @@ def rmdirs(self, path, ignore_errors=True, attempts=3, delay=1): # OK! return True + def rmdir(self, path: str): + assert type(path) == str # noqa: E721 + os.rmdir(path) + def listdir(self, path): return os.listdir(path) @@ -500,3 +528,10 @@ def is_port_free(self, number: int) -> bool: return True except OSError: return False + + def get_tempdir(self) -> str: + r = tempfile.gettempdir() + assert r is not None + assert type(r) == str # noqa: E721 + assert os.path.exists(r) + return r diff --git a/testgres/operations/os_ops.py b/testgres/operations/os_ops.py index d25e76bc..45e4f71c 100644 --- a/testgres/operations/os_ops.py +++ b/testgres/operations/os_ops.py @@ -53,9 +53,17 @@ def get_name(self): def makedirs(self, path, remove_existing=False): raise NotImplementedError() + def makedir(self, path: str): + assert type(path) == str # noqa: E721 + raise NotImplementedError() + def rmdirs(self, path, ignore_errors=True): raise NotImplementedError() + def rmdir(self, path: str): + assert type(path) == str # noqa: E721 + raise NotImplementedError() + def listdir(self, path): raise NotImplementedError() @@ -122,3 +130,6 @@ def get_process_children(self, pid): def is_port_free(self, number: int): assert type(number) == int # noqa: E721 raise NotImplementedError() + + def get_tempdir(self) -> str: + raise NotImplementedError() diff --git a/testgres/operations/remote_ops.py b/testgres/operations/remote_ops.py index e722a2cb..a478b453 100644 --- a/testgres/operations/remote_ops.py +++ b/testgres/operations/remote_ops.py @@ -225,6 +225,11 @@ def makedirs(self, path, remove_existing=False): raise Exception("Couldn't create dir {} because of error {}".format(path, error)) return result + def makedir(self, path: str): + assert type(path) == str # noqa: E721 + cmd = ["mkdir", path] + self.exec_command(cmd) + def rmdirs(self, path, ignore_errors=True): """ Remove a directory in the remote server. @@ -265,6 +270,11 @@ def rmdirs(self, path, ignore_errors=True): return False return True + def rmdir(self, path: str): + assert type(path) == str # noqa: E721 + cmd = ["rmdir", path] + self.exec_command(cmd) + def listdir(self, path): """ List all files and directories in a directory. 
@@ -649,6 +659,34 @@ def is_port_free(self, number: int) -> bool: out=output ) + def get_tempdir(self) -> str: + command = ["mktemp", "-u", "-d"] + + exec_exitcode, exec_output, exec_error = self.exec_command( + command, + verbose=True, + encoding=get_default_encoding(), + ignore_errors=True + ) + + assert type(exec_exitcode) == int # noqa: E721 + assert type(exec_output) == str # noqa: E721 + assert type(exec_error) == str # noqa: E721 + + if exec_exitcode != 0: + RaiseError.CommandExecutionError( + cmd=command, + exit_code=exec_exitcode, + message="Could not detect a temporary directory.", + error=exec_error, + out=exec_output) + + temp_subdir = exec_output.strip() + assert type(temp_subdir) == str # noqa: E721 + temp_dir = os.path.dirname(temp_subdir) + assert type(temp_dir) == str # noqa: E721 + return temp_dir + @staticmethod def _is_port_free__process_0(error: str) -> bool: assert type(error) == str # noqa: E721 diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/app.py b/testgres/plugins/pg_probackup2/pg_probackup2/app.py index 5166e9b8..2b87b48f 100644 --- a/testgres/plugins/pg_probackup2/pg_probackup2/app.py +++ b/testgres/plugins/pg_probackup2/pg_probackup2/app.py @@ -60,6 +60,7 @@ def __init__(self, test_class: unittest.TestCase, self.archive_compress = init_params.archive_compress self.test_class.output = None self.execution_time = None + self.valgrind_sup_path = init_params.valgrind_sup_path def form_daemon_process(self, cmdline, env): def stream_output(stream: subprocess.PIPE) -> None: @@ -88,6 +89,7 @@ def stream_output(stream: subprocess.PIPE) -> None: return self.process.pid + # ---- Start run function ---- # def run(self, command, gdb=False, old_binary=False, return_id=True, env=None, skip_log_directory=False, expect_error=False, use_backup_dir=True, daemonize=False): """ @@ -98,26 +100,46 @@ def run(self, command, gdb=False, old_binary=False, return_id=True, env=None, gdb: when True it returns GDBObj(), when tuple('suspend', port) it runs probackup in suspended gdb mode with attachable gdb port, for local debugging """ + command = self._add_backup_dir_to_cmd(command, use_backup_dir) + # Old bin or regular one + binary_path = self._get_binary_path(old_binary) + + if not env: + env = self.test_env + # Add additional options if needed + command, strcommand = self._add_options(command, skip_log_directory) + + self.test_class.cmd = f"{binary_path} {strcommand}" + if self.verbose: + print(self.test_class.cmd) + + cmdline = self._form_cmdline(binary_path, command) + + if gdb is True: + # general test flow for using GDBObj + return GDBobj(cmdline, self.test_class) + + return self._execute_command(cmdline, env, command, gdb, expect_error, return_id, daemonize) + + def _add_backup_dir_to_cmd(self, command: list, use_backup_dir: TestBackupDir): if isinstance(use_backup_dir, TestBackupDir): - command = [command[0], *use_backup_dir.pb_args, *command[1:]] + return [command[0], *use_backup_dir.pb_args, *command[1:]] elif use_backup_dir: - command = [command[0], *self.backup_dir.pb_args, *command[1:]] + return [command[0], *self.backup_dir.pb_args, *command[1:]] else: - command = [command[0], *self.backup_dir.pb_args[2:], *command[1:]] - - if not self.probackup_old_path and old_binary: - logging.error('PGPROBACKUPBIN_OLD is not set') - exit(1) + return [command[0], *self.backup_dir.pb_args[2:], *command[1:]] + def _get_binary_path(self, old_binary): if old_binary: - binary_path = self.probackup_old_path - else: - binary_path = self.probackup_path - - if not env: - env = 
self.test_env + if not self.probackup_old_path: + logging.error('PGPROBACKUPBIN_OLD is not set') + exit(1) + return self.probackup_old_path + return self.probackup_path + def _add_options(self, command: list, skip_log_directory: bool): strcommand = ' '.join(str(p) for p in command) + if '--log-level-file' in strcommand and \ '--log-directory' not in strcommand and \ not skip_log_directory: @@ -125,26 +147,46 @@ def run(self, command, gdb=False, old_binary=False, return_id=True, env=None, strcommand += ' ' + command[-1] if 'pglz' in strcommand and \ - ' -j' not in strcommand and '--thread' not in strcommand: + ' -j' not in strcommand and \ + '--thread' not in strcommand: command += ['-j', '1'] strcommand += ' -j 1' - self.test_class.cmd = binary_path + ' ' + strcommand - if self.verbose: - print(self.test_class.cmd) + return command, strcommand + def _form_cmdline(self, binary_path, command): cmdline = [binary_path, *command] - if gdb is True: - # general test flow for using GDBObj - return GDBobj(cmdline, self.test_class) + if self.valgrind_sup_path and command[0] != "--version": + os.makedirs(self.pb_log_path, exist_ok=True) + if self.valgrind_sup_path and not os.path.isfile(self.valgrind_sup_path): + raise FileNotFoundError(f"PG_PROBACKUP_VALGRIND_SUP should contain path to valgrind suppression file, " + f"but found: {self.valgrind_sup_path}") + valgrind_cmd = [ + "valgrind", + "--gen-suppressions=all", + "--leak-check=full", + "--show-reachable=yes", + "--error-limit=no", + "--show-leak-kinds=all", + "--errors-for-leak-kinds=all", + "--error-exitcode=0", + f"--log-file={os.path.join(self.pb_log_path, f'valgrind-{command[0]}-%p.log')}", + f"--suppressions={self.valgrind_sup_path}", + "--" + ] + cmdline = valgrind_cmd + cmdline + + return cmdline + + def _execute_command(self, cmdline, env, command, gdb, expect_error, return_id, daemonize): try: - if type(gdb) is tuple and gdb[0] == 'suspend': - # special test flow for manually debug probackup + if isinstance(gdb, tuple) and gdb[0] == 'suspend': gdb_port = gdb[1] cmdline = ['gdbserver'] + ['localhost:' + str(gdb_port)] + cmdline logging.warning("pg_probackup gdb suspended, waiting gdb connection on localhost:{0}".format(gdb_port)) + # Execute command start_time = time.time() if daemonize: return self.form_daemon_process(cmdline, env) @@ -174,6 +216,7 @@ def run(self, command, gdb=False, old_binary=False, return_id=True, env=None, return self.test_class.output else: raise ProbackupException(self.test_class.output, self.test_class.cmd) + # ---- End run function ---- # def get_backup_id(self): if init_params.major_version > 2: diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py b/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py index 2424c04d..b7ca549e 100644 --- a/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py +++ b/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py @@ -1,6 +1,5 @@ import functools import os -import re import subprocess import sys import unittest @@ -57,13 +56,6 @@ def __init__(self, cmd, env, attach=False): else: self.cmd = self.base_cmd + ['--args'] + cmd - # Get version - gdb_version_number = re.search( - br"^GNU gdb [^\d]*(\d+)\.(\d)", - gdb_version) - self.major_version = int(gdb_version_number.group(1)) - self.minor_version = int(gdb_version_number.group(2)) - if self.verbose: print([' '.join(map(str, self.cmd))]) diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py b/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py index c4570a39..9c62dcf1 100644 --- 
a/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py +++ b/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py @@ -214,6 +214,8 @@ def __init__(self): else: raise Exception('Can\'t process pg_probackup version \"{}\": the major version is expected to be a number'.format(self.probackup_version)) + self.valgrind_sup_path = test_env.get('PG_PROBACKUP_VALGRIND_SUP', None) + def test_env(self): return self._test_env.copy() diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py b/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py index ba788623..2540ddb0 100644 --- a/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py +++ b/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py @@ -4,14 +4,14 @@ import shutil import pytest -from ...... import testgres +import testgres from ...pg_probackup2.app import ProbackupApp from ...pg_probackup2.init_helpers import Init, init_params from ..storage.fs_backup import FSTestBackupDir class ProbackupTest: - pg_node: testgres.PostgresNode + pg_node: testgres.NodeApp @staticmethod def probackup_is_available() -> bool: @@ -75,21 +75,30 @@ def helper__build_backup_dir(self, backup='backup'): @pytest.mark.skipif(not ProbackupTest.probackup_is_available(), reason="Check that PGPROBACKUPBIN is defined and is valid.") class TestBasic(ProbackupTest): def test_full_backup(self): + assert self.pg_node is not None + assert type(self.pg_node) == testgres.NodeApp # noqa: E721 + assert self.pb is not None + assert type(self.pb) == ProbackupApp # noqa: E721 + # Setting up a simple test node node = self.pg_node.make_simple('node', pg_options={"fsync": "off", "synchronous_commit": "off"}) - # Initialize and configure Probackup - self.pb.init() - self.pb.add_instance('node', node) - self.pb.set_archiving('node', node) + assert node is not None + assert type(node) == testgres.PostgresNode # noqa: E721 + + with node: + # Initialize and configure Probackup + self.pb.init() + self.pb.add_instance('node', node) + self.pb.set_archiving('node', node) - # Start the node and initialize pgbench - node.slow_start() - node.pgbench_init(scale=100, no_vacuum=True) + # Start the node and initialize pgbench + node.slow_start() + node.pgbench_init(scale=100, no_vacuum=True) - # Perform backup and validation - backup_id = self.pb.backup_node('node', node) - out = self.pb.validate('node', backup_id) + # Perform backup and validation + backup_id = self.pb.backup_node('node', node) + out = self.pb.validate('node', backup_id) - # Check if the backup is valid - assert f"INFO: Backup {backup_id} is valid" in out + # Check if the backup is valid + assert f"INFO: Backup {backup_id} is valid" in out diff --git a/testgres/plugins/pg_probackup2/setup.py b/testgres/plugins/pg_probackup2/setup.py index 7a3212e4..b9b0067e 100644 --- a/testgres/plugins/pg_probackup2/setup.py +++ b/testgres/plugins/pg_probackup2/setup.py @@ -4,7 +4,7 @@ from distutils.core import setup setup( - version='0.1.0', + version='0.1.1', name='testgres_pg_probackup2', packages=['pg_probackup2', 'pg_probackup2.storage'], description='Plugin for testgres that manages pg_probackup2', diff --git a/testgres/port_manager.py b/testgres/port_manager.py index e2530470..1ae696c8 100644 --- a/testgres/port_manager.py +++ b/testgres/port_manager.py @@ -1,14 +1,3 @@ -from .operations.os_ops import OsOperations - -from .exceptions import PortForException - -from . 
import utils - -import threading -import random -import typing - - class PortManager: def __init__(self): super().__init__() @@ -19,85 +8,3 @@ def reserve_port(self) -> int: def release_port(self, number: int) -> None: assert type(number) == int # noqa: E721 raise NotImplementedError("PortManager::release_port is not implemented.") - - -class PortManager__ThisHost(PortManager): - sm_single_instance: PortManager = None - sm_single_instance_guard = threading.Lock() - - def __init__(self): - pass - - def __new__(cls) -> PortManager: - assert __class__ == PortManager__ThisHost - assert __class__.sm_single_instance_guard is not None - - if __class__.sm_single_instance is None: - with __class__.sm_single_instance_guard: - __class__.sm_single_instance = super().__new__(cls) - assert __class__.sm_single_instance - assert type(__class__.sm_single_instance) == __class__ # noqa: E721 - return __class__.sm_single_instance - - def reserve_port(self) -> int: - return utils.reserve_port() - - def release_port(self, number: int) -> None: - assert type(number) == int # noqa: E721 - return utils.release_port(number) - - -class PortManager__Generic(PortManager): - _os_ops: OsOperations - _guard: object - # TODO: is there better to use bitmap fot _available_ports? - _available_ports: typing.Set[int] - _reserved_ports: typing.Set[int] - - def __init__(self, os_ops: OsOperations): - assert os_ops is not None - assert isinstance(os_ops, OsOperations) - self._os_ops = os_ops - self._guard = threading.Lock() - self._available_ports: typing.Set[int] = set(range(1024, 65535)) - self._reserved_ports: typing.Set[int] = set() - - def reserve_port(self) -> int: - assert self._guard is not None - assert type(self._available_ports) == set # noqa: E721t - assert type(self._reserved_ports) == set # noqa: E721 - - with self._guard: - t = tuple(self._available_ports) - assert len(t) == len(self._available_ports) - sampled_ports = random.sample(t, min(len(t), 100)) - t = None - - for port in sampled_ports: - assert not (port in self._reserved_ports) - assert port in self._available_ports - - if not self._os_ops.is_port_free(port): - continue - - self._reserved_ports.add(port) - self._available_ports.discard(port) - assert port in self._reserved_ports - assert not (port in self._available_ports) - return port - - raise PortForException("Can't select a port.") - - def release_port(self, number: int) -> None: - assert type(number) == int # noqa: E721 - - assert self._guard is not None - assert type(self._reserved_ports) == set # noqa: E721 - - with self._guard: - assert number in self._reserved_ports - assert not (number in self._available_ports) - self._available_ports.add(number) - self._reserved_ports.discard(number) - assert not (number in self._reserved_ports) - assert number in self._available_ports diff --git a/testgres/utils.py b/testgres/utils.py index 2ff6f2a0..d231eec3 100644 --- a/testgres/utils.py +++ b/testgres/utils.py @@ -6,8 +6,6 @@ import os import sys -import socket -import random from contextlib import contextmanager from packaging.version import Version, InvalidVersion @@ -15,18 +13,25 @@ from six import iteritems -from .exceptions import PortForException from .exceptions import ExecUtilException from .config import testgres_config as tconf from .operations.os_ops import OsOperations from .operations.remote_ops import RemoteOperations +from .operations.local_ops import LocalOperations from .operations.helpers import Helpers as OsHelpers +from .impl.port_manager__generic import PortManager__Generic + # rows 
returned by PG_CONFIG _pg_config_data = {} +# +# The old, global "port manager" always worked with LOCAL system +# +_old_port_manager = PortManager__Generic(LocalOperations.get_single_instance()) + # ports used by nodes -bound_ports = set() +bound_ports = _old_port_manager._reserved_ports # re-export version type @@ -43,28 +48,7 @@ def internal__reserve_port(): """ Generate a new port and add it to 'bound_ports'. """ - def LOCAL__is_port_free(port: int) -> bool: - """Check if a port is free to use.""" - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - try: - s.bind(("", port)) - return True - except OSError: - return False - - ports = set(range(1024, 65535)) - assert type(ports) == set # noqa: E721 - assert type(bound_ports) == set # noqa: E721 - ports.difference_update(bound_ports) - - sampled_ports = random.sample(tuple(ports), min(len(ports), 100)) - - for port in sampled_ports: - if LOCAL__is_port_free(port): - bound_ports.add(port) - return port - - raise PortForException("Can't select a port") + return _old_port_manager.reserve_port() def internal__release_port(port): @@ -73,9 +57,7 @@ def internal__release_port(port): """ assert type(port) == int # noqa: E721 - assert port in bound_ports - - bound_ports.discard(port) + return _old_port_manager.release_port(port) reserve_port = internal__reserve_port diff --git a/tests/conftest.py b/tests/conftest.py index 6f2f9e41..25580aee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -50,13 +50,24 @@ def CalcRootDir() -> str: r = os.path.abspath(r) return r + # -------------------------------------------------------------------- + def CalcRootLogDir() -> str: + if TestConfigPropNames.TEST_CFG__LOG_DIR in os.environ: + resultPath = os.environ[TestConfigPropNames.TEST_CFG__LOG_DIR] + else: + rootDir = __class__.CalcRootDir() + resultPath = os.path.join(rootDir, "logs") + + assert type(resultPath) == str # noqa: E721 + return resultPath + # -------------------------------------------------------------------- def CalcCurrentTestWorkerSignature() -> str: currentPID = os.getpid() - assert type(currentPID) + assert type(currentPID) == int # noqa: E721 startTS = __class__.sm_StartTS - assert type(startTS) + assert type(startTS) == datetime.datetime # noqa: E721 result = "pytest-{0:04d}{1:02d}{2:02d}_{3:02d}{4:02d}{5:02d}".format( startTS.year, @@ -86,11 +97,18 @@ class TestStartupData: TestStartupData__Helper.CalcCurrentTestWorkerSignature() ) + sm_RootLogDir: str = TestStartupData__Helper.CalcRootLogDir() + # -------------------------------------------------------------------- def GetRootDir() -> str: assert type(__class__.sm_RootDir) == str # noqa: E721 return __class__.sm_RootDir + # -------------------------------------------------------------------- + def GetRootLogDir() -> str: + assert type(__class__.sm_RootLogDir) == str # noqa: E721 + return __class__.sm_RootLogDir + # -------------------------------------------------------------------- def GetCurrentTestWorkerSignature() -> str: assert type(__class__.sm_CurrentTestWorkerSignature) == str # noqa: E721 @@ -320,6 +338,7 @@ def helper__build_test_id(item: pytest.Function) -> str: g_error_msg_count_key = pytest.StashKey[int]() g_warning_msg_count_key = pytest.StashKey[int]() +g_critical_msg_count_key = pytest.StashKey[int]() # ///////////////////////////////////////////////////////////////////////////// @@ -395,10 +414,17 @@ def helper__makereport__call( assert type(outcome) == pluggy.Result # noqa: E721 # -------- - item_error_msg_count = 
item.stash.get(g_error_msg_count_key, 0) - assert type(item_error_msg_count) == int # noqa: E721 - assert item_error_msg_count >= 0 + item_error_msg_count1 = item.stash.get(g_error_msg_count_key, 0) + assert type(item_error_msg_count1) == int # noqa: E721 + assert item_error_msg_count1 >= 0 + item_error_msg_count2 = item.stash.get(g_critical_msg_count_key, 0) + assert type(item_error_msg_count2) == int # noqa: E721 + assert item_error_msg_count2 >= 0 + + item_error_msg_count = item_error_msg_count1 + item_error_msg_count2 + + # -------- item_warning_msg_count = item.stash.get(g_warning_msg_count_key, 0) assert type(item_warning_msg_count) == int # noqa: E721 assert item_warning_msg_count >= 0 @@ -582,103 +608,87 @@ def pytest_runtest_makereport(item: pytest.Function, call: pytest.CallInfo): # ///////////////////////////////////////////////////////////////////////////// -class LogErrorWrapper2: +class LogWrapper2: _old_method: any - _counter: typing.Optional[int] + _err_counter: typing.Optional[int] + _warn_counter: typing.Optional[int] + + _critical_counter: typing.Optional[int] # -------------------------------------------------------------------- def __init__(self): self._old_method = None - self._counter = None + self._err_counter = None + self._warn_counter = None + + self._critical_counter = None # -------------------------------------------------------------------- def __enter__(self): assert self._old_method is None - assert self._counter is None - - self._old_method = logging.error - self._counter = 0 - - logging.error = self - return self - - # -------------------------------------------------------------------- - def __exit__(self, exc_type, exc_val, exc_tb): - assert self._old_method is not None - assert self._counter is not None - - assert logging.error is self - - logging.error = self._old_method - - self._old_method = None - self._counter = None - return False - - # -------------------------------------------------------------------- - def __call__(self, *args, **kwargs): - assert self._old_method is not None - assert self._counter is not None + assert self._err_counter is None + assert self._warn_counter is None - assert type(self._counter) == int # noqa: E721 - assert self._counter >= 0 + assert self._critical_counter is None - r = self._old_method(*args, **kwargs) + assert logging.root is not None + assert isinstance(logging.root, logging.RootLogger) - self._counter += 1 - assert self._counter > 0 + self._old_method = logging.root.handle + self._err_counter = 0 + self._warn_counter = 0 - return r - - -# ///////////////////////////////////////////////////////////////////////////// - - -class LogWarningWrapper2: - _old_method: any - _counter: typing.Optional[int] + self._critical_counter = 0 - # -------------------------------------------------------------------- - def __init__(self): - self._old_method = None - self._counter = None - - # -------------------------------------------------------------------- - def __enter__(self): - assert self._old_method is None - assert self._counter is None - - self._old_method = logging.warning - self._counter = 0 - - logging.warning = self + logging.root.handle = self return self # -------------------------------------------------------------------- def __exit__(self, exc_type, exc_val, exc_tb): assert self._old_method is not None - assert self._counter is not None + assert self._err_counter is not None + assert self._warn_counter is not None - assert logging.warning is self + assert logging.root is not None + assert 
isinstance(logging.root, logging.RootLogger) - logging.warning = self._old_method + assert logging.root.handle is self + + logging.root.handle = self._old_method self._old_method = None - self._counter = None + self._err_counter = None + self._warn_counter = None + self._critical_counter = None return False # -------------------------------------------------------------------- - def __call__(self, *args, **kwargs): + def __call__(self, record: logging.LogRecord): + assert record is not None + assert isinstance(record, logging.LogRecord) assert self._old_method is not None - assert self._counter is not None - - assert type(self._counter) == int # noqa: E721 - assert self._counter >= 0 - - r = self._old_method(*args, **kwargs) - - self._counter += 1 - assert self._counter > 0 + assert self._err_counter is not None + assert self._warn_counter is not None + assert self._critical_counter is not None + + assert type(self._err_counter) == int # noqa: E721 + assert self._err_counter >= 0 + assert type(self._warn_counter) == int # noqa: E721 + assert self._warn_counter >= 0 + assert type(self._critical_counter) == int # noqa: E721 + assert self._critical_counter >= 0 + + r = self._old_method(record) + + if record.levelno == logging.ERROR: + self._err_counter += 1 + assert self._err_counter > 0 + elif record.levelno == logging.WARNING: + self._warn_counter += 1 + assert self._warn_counter > 0 + elif record.levelno == logging.CRITICAL: + self._critical_counter += 1 + assert self._critical_counter > 0 return r @@ -699,6 +709,13 @@ def pytest_pyfunc_call(pyfuncitem: pytest.Function): assert pyfuncitem is not None assert isinstance(pyfuncitem, pytest.Function) + assert logging.root is not None + assert isinstance(logging.root, logging.RootLogger) + assert logging.root.handle is not None + + debug__log_handle_method = logging.root.handle + assert debug__log_handle_method is not None + debug__log_error_method = logging.error assert debug__log_error_method is not None @@ -707,55 +724,56 @@ def pytest_pyfunc_call(pyfuncitem: pytest.Function): pyfuncitem.stash[g_error_msg_count_key] = 0 pyfuncitem.stash[g_warning_msg_count_key] = 0 + pyfuncitem.stash[g_critical_msg_count_key] = 0 try: - with LogErrorWrapper2() as logErrorWrapper, LogWarningWrapper2() as logWarningWrapper: - assert type(logErrorWrapper) == LogErrorWrapper2 # noqa: E721 - assert logErrorWrapper._old_method is not None - assert type(logErrorWrapper._counter) == int # noqa: E721 - assert logErrorWrapper._counter == 0 - assert logging.error is logErrorWrapper - - assert type(logWarningWrapper) == LogWarningWrapper2 # noqa: E721 - assert logWarningWrapper._old_method is not None - assert type(logWarningWrapper._counter) == int # noqa: E721 - assert logWarningWrapper._counter == 0 - assert logging.warning is logWarningWrapper + with LogWrapper2() as logWrapper: + assert type(logWrapper) == LogWrapper2 # noqa: E721 + assert logWrapper._old_method is not None + assert type(logWrapper._err_counter) == int # noqa: E721 + assert logWrapper._err_counter == 0 + assert type(logWrapper._warn_counter) == int # noqa: E721 + assert logWrapper._warn_counter == 0 + assert type(logWrapper._critical_counter) == int # noqa: E721 + assert logWrapper._critical_counter == 0 + assert logging.root.handle is logWrapper r: pluggy.Result = yield assert r is not None assert type(r) == pluggy.Result # noqa: E721 - assert logErrorWrapper._old_method is not None - assert type(logErrorWrapper._counter) == int # noqa: E721 - assert logErrorWrapper._counter >= 0 - assert 
logging.error is logErrorWrapper - - assert logWarningWrapper._old_method is not None - assert type(logWarningWrapper._counter) == int # noqa: E721 - assert logWarningWrapper._counter >= 0 - assert logging.warning is logWarningWrapper + assert logWrapper._old_method is not None + assert type(logWrapper._err_counter) == int # noqa: E721 + assert logWrapper._err_counter >= 0 + assert type(logWrapper._warn_counter) == int # noqa: E721 + assert logWrapper._warn_counter >= 0 + assert type(logWrapper._critical_counter) == int # noqa: E721 + assert logWrapper._critical_counter >= 0 + assert logging.root.handle is logWrapper assert g_error_msg_count_key in pyfuncitem.stash assert g_warning_msg_count_key in pyfuncitem.stash + assert g_critical_msg_count_key in pyfuncitem.stash assert pyfuncitem.stash[g_error_msg_count_key] == 0 assert pyfuncitem.stash[g_warning_msg_count_key] == 0 + assert pyfuncitem.stash[g_critical_msg_count_key] == 0 - pyfuncitem.stash[g_error_msg_count_key] = logErrorWrapper._counter - pyfuncitem.stash[g_warning_msg_count_key] = logWarningWrapper._counter + pyfuncitem.stash[g_error_msg_count_key] = logWrapper._err_counter + pyfuncitem.stash[g_warning_msg_count_key] = logWrapper._warn_counter + pyfuncitem.stash[g_critical_msg_count_key] = logWrapper._critical_counter if r.exception is not None: pass - elif logErrorWrapper._counter == 0: - pass - else: - assert logErrorWrapper._counter > 0 + elif logWrapper._err_counter > 0: + r.force_exception(SIGNAL_EXCEPTION()) + elif logWrapper._critical_counter > 0: r.force_exception(SIGNAL_EXCEPTION()) finally: assert logging.error is debug__log_error_method assert logging.warning is debug__log_warning_method + assert logging.root.handle == debug__log_handle_method pass @@ -954,13 +972,9 @@ def pytest_configure(config: pytest.Config) -> None: log_name = TestStartupData.GetCurrentTestWorkerSignature() log_name += ".log" - if TestConfigPropNames.TEST_CFG__LOG_DIR in os.environ: - log_path_v = os.environ[TestConfigPropNames.TEST_CFG__LOG_DIR] - log_path = pathlib.Path(log_path_v) - else: - log_path = config.rootpath.joinpath("logs") + log_dir = TestStartupData.GetRootLogDir() - log_path.mkdir(exist_ok=True) + pathlib.Path(log_dir).mkdir(exist_ok=True) logging_plugin: _pytest.logging.LoggingPlugin = config.pluginmanager.get_plugin( "logging-plugin" @@ -969,7 +983,7 @@ def pytest_configure(config: pytest.Config) -> None: assert logging_plugin is not None assert isinstance(logging_plugin, _pytest.logging.LoggingPlugin) - logging_plugin.set_log_path(str(log_path / log_name)) + logging_plugin.set_log_path(os.path.join(log_dir, log_name)) # ///////////////////////////////////////////////////////////////////////////// diff --git a/tests/helpers/global_data.py b/tests/helpers/global_data.py index c21d7dd8..f3df41a3 100644 --- a/tests/helpers/global_data.py +++ b/tests/helpers/global_data.py @@ -1,11 +1,11 @@ -from ...testgres.operations.os_ops import OsOperations -from ...testgres.operations.os_ops import ConnectionParams -from ...testgres.operations.local_ops import LocalOperations -from ...testgres.operations.remote_ops import RemoteOperations +from testgres.operations.os_ops import OsOperations +from testgres.operations.os_ops import ConnectionParams +from testgres.operations.local_ops import LocalOperations +from testgres.operations.remote_ops import RemoteOperations -from ...testgres.node import PortManager -from ...testgres.node import PortManager__ThisHost -from ...testgres.node import PortManager__Generic +from testgres.node import PortManager 
+from testgres.node import PortManager__ThisHost +from testgres.node import PortManager__Generic import os @@ -31,7 +31,7 @@ class OsOpsDescrs: sm_remote_os_ops_descr = OsOpsDescr("remote_ops", sm_remote_os_ops) - sm_local_os_ops = LocalOperations() + sm_local_os_ops = LocalOperations.get_single_instance() sm_local_os_ops_descr = OsOpsDescr("local_ops", sm_local_os_ops) @@ -39,7 +39,7 @@ class OsOpsDescrs: class PortManagers: sm_remote_port_manager = PortManager__Generic(OsOpsDescrs.sm_remote_os_ops) - sm_local_port_manager = PortManager__ThisHost() + sm_local_port_manager = PortManager__ThisHost.get_single_instance() sm_local2_port_manager = PortManager__Generic(OsOpsDescrs.sm_local_os_ops) diff --git a/tests/test_config.py b/tests/test_config.py index 05702e9a..a80a11f1 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,9 +1,9 @@ -from ..testgres import TestgresConfig -from ..testgres import configure_testgres -from ..testgres import scoped_config -from ..testgres import pop_config +from testgres import TestgresConfig +from testgres import configure_testgres +from testgres import scoped_config +from testgres import pop_config -from .. import testgres +import testgres import pytest diff --git a/tests/test_os_ops_common.py b/tests/test_os_ops_common.py index 17c3151c..5ae3a61f 100644 --- a/tests/test_os_ops_common.py +++ b/tests/test_os_ops_common.py @@ -13,9 +13,13 @@ import socket import threading import typing +import uuid -from ..testgres import InvalidOperationException -from ..testgres import ExecUtilException +from testgres import InvalidOperationException +from testgres import ExecUtilException + +from concurrent.futures import ThreadPoolExecutor +from concurrent.futures import Future as ThreadFuture class TestOsOpsCommon: @@ -812,3 +816,300 @@ def LOCAL_server(s: socket.socket): if ok_count == 0: raise RuntimeError("No one free port was found.") + + def test_get_tmpdir(self, os_ops: OsOperations): + assert isinstance(os_ops, OsOperations) + + dir = os_ops.get_tempdir() + assert type(dir) == str # noqa: E721 + assert os_ops.path_exists(dir) + assert os.path.exists(dir) + + file_path = os.path.join(dir, "testgres--" + uuid.uuid4().hex + ".tmp") + + os_ops.write(file_path, "1234", binary=False) + + assert os_ops.path_exists(file_path) + assert os.path.exists(file_path) + + d = os_ops.read(file_path, binary=False) + + assert d == "1234" + + os_ops.remove_file(file_path) + + assert not os_ops.path_exists(file_path) + assert not os.path.exists(file_path) + + def test_get_tmpdir__compare_with_py_info(self, os_ops: OsOperations): + assert isinstance(os_ops, OsOperations) + + actual_dir = os_ops.get_tempdir() + assert actual_dir is not None + assert type(actual_dir) == str # noqa: E721 + expected_dir = str(tempfile.tempdir) + assert actual_dir == expected_dir + + class tagData_OS_OPS__NUMS: + os_ops_descr: OsOpsDescr + nums: int + + def __init__(self, os_ops_descr: OsOpsDescr, nums: int): + assert isinstance(os_ops_descr, OsOpsDescr) + assert type(nums) == int # noqa: E721 + + self.os_ops_descr = os_ops_descr + self.nums = nums + + sm_test_exclusive_creation__mt__data = [ + tagData_OS_OPS__NUMS(OsOpsDescrs.sm_local_os_ops_descr, 100000), + tagData_OS_OPS__NUMS(OsOpsDescrs.sm_remote_os_ops_descr, 120), + ] + + @pytest.fixture( + params=sm_test_exclusive_creation__mt__data, + ids=[x.os_ops_descr.sign for x in sm_test_exclusive_creation__mt__data] + ) + def data001(self, request: pytest.FixtureRequest) -> tagData_OS_OPS__NUMS: + assert isinstance(request, 
pytest.FixtureRequest) + return request.param + + def test_mkdir__mt(self, data001: tagData_OS_OPS__NUMS): + assert type(data001) == __class__.tagData_OS_OPS__NUMS # noqa: E721 + + N_WORKERS = 4 + N_NUMBERS = data001.nums + assert type(N_NUMBERS) == int # noqa: E721 + + os_ops = data001.os_ops_descr.os_ops + assert isinstance(os_ops, OsOperations) + + lock_dir_prefix = "test_mkdir_mt--" + uuid.uuid4().hex + + lock_dir = os_ops.mkdtemp(prefix=lock_dir_prefix) + + logging.info("A lock file [{}] is creating ...".format(lock_dir)) + + assert os.path.exists(lock_dir) + + def MAKE_PATH(lock_dir: str, num: int) -> str: + assert type(lock_dir) == str # noqa: E721 + assert type(num) == int # noqa: E721 + return os.path.join(lock_dir, str(num) + ".lock") + + def LOCAL_WORKER(os_ops: OsOperations, + workerID: int, + lock_dir: str, + cNumbers: int, + reservedNumbers: typing.Set[int]) -> None: + assert isinstance(os_ops, OsOperations) + assert type(workerID) == int # noqa: E721 + assert type(lock_dir) == str # noqa: E721 + assert type(cNumbers) == int # noqa: E721 + assert type(reservedNumbers) == set # noqa: E721 + assert cNumbers > 0 + assert len(reservedNumbers) == 0 + + assert os.path.exists(lock_dir) + + def LOG_INFO(template: str, *args: list) -> None: + assert type(template) == str # noqa: E721 + assert type(args) == tuple # noqa: E721 + + msg = template.format(*args) + assert type(msg) == str # noqa: E721 + + logging.info("[Worker #{}] {}".format(workerID, msg)) + return + + LOG_INFO("HELLO! I am here!") + + for num in range(cNumbers): + assert not (num in reservedNumbers) + + file_path = MAKE_PATH(lock_dir, num) + + try: + os_ops.makedir(file_path) + except Exception as e: + LOG_INFO( + "Can't reserve {}. Error ({}): {}", + num, + type(e).__name__, + str(e) + ) + continue + + LOG_INFO("Number {} is reserved!", num) + assert os_ops.path_exists(file_path) + reservedNumbers.add(num) + continue + + n_total = cNumbers + n_ok = len(reservedNumbers) + assert n_ok <= n_total + + LOG_INFO("Finish! OK: {}. FAILED: {}.", n_ok, n_total - n_ok) + return + + # ----------------------- + logging.info("Worker are creating ...") + + threadPool = ThreadPoolExecutor( + max_workers=N_WORKERS, + thread_name_prefix="ex_creator" + ) + + class tadWorkerData: + future: ThreadFuture + reservedNumbers: typing.Set[int] + + workerDatas: typing.List[tadWorkerData] = list() + + nErrors = 0 + + try: + for n in range(N_WORKERS): + logging.info("worker #{} is creating ...".format(n)) + + workerDatas.append(tadWorkerData()) + + workerDatas[n].reservedNumbers = set() + + workerDatas[n].future = threadPool.submit( + LOCAL_WORKER, + os_ops, + n, + lock_dir, + N_NUMBERS, + workerDatas[n].reservedNumbers + ) + + assert workerDatas[n].future is not None + + logging.info("OK. 
All the workers were created!") + except Exception as e: + nErrors += 1 + logging.error("A problem is detected ({}): {}".format(type(e).__name__, str(e))) + + logging.info("Will wait for stop of all the workers...") + + nWorkers = 0 + + assert type(workerDatas) == list # noqa: E721 + + for i in range(len(workerDatas)): + worker = workerDatas[i].future + + if worker is None: + continue + + nWorkers += 1 + + assert isinstance(worker, ThreadFuture) + + try: + logging.info("Wait for worker #{}".format(i)) + worker.result() + except Exception as e: + nErrors += 1 + logging.error("Worker #{} finished with error ({}): {}".format( + i, + type(e).__name__, + str(e), + )) + continue + + assert nWorkers == N_WORKERS + + if nErrors != 0: + raise RuntimeError("Some problems were detected. Please examine the log messages.") + + logging.info("OK. Let's check worker results!") + + reservedNumbers: typing.Dict[int, int] = dict() + + for i in range(N_WORKERS): + logging.info("Worker #{} is checked ...".format(i)) + + workerNumbers = workerDatas[i].reservedNumbers + assert type(workerNumbers) == set # noqa: E721 + + for n in workerNumbers: + if n < 0 or n >= N_NUMBERS: + nErrors += 1 + logging.error("Unexpected number {}".format(n)) + continue + + if n in reservedNumbers.keys(): + nErrors += 1 + logging.error("Number {} was already reserved by worker #{}".format( + n, + reservedNumbers[n] + )) + else: + reservedNumbers[n] = i + + file_path = MAKE_PATH(lock_dir, n) + if not os_ops.path_exists(file_path): + nErrors += 1 + logging.error("File {} is not found!".format(file_path)) + continue + + continue + + logging.info("OK. Let's check reservedNumbers!") + + for n in range(N_NUMBERS): + if not (n in reservedNumbers.keys()): + nErrors += 1 + logging.error("Number {} is not reserved!".format(n)) + continue + + file_path = MAKE_PATH(lock_dir, n) + if not os_ops.path_exists(file_path): + nErrors += 1 + logging.error("File {} is not found!".format(file_path)) + continue + + # OK! + continue + + logging.info("Verification is finished! Total error count is {}.".format(nErrors)) + + if nErrors == 0: + logging.info("Root lock-directory [{}] will be deleted.".format( + lock_dir + )) + + for n in range(N_NUMBERS): + file_path = MAKE_PATH(lock_dir, n) + try: + os_ops.rmdir(file_path) + except Exception as e: + nErrors += 1 + logging.error("Cannot delete directory [{}]. Error ({}): {}".format( + file_path, + type(e).__name__, + str(e) + )) + continue + + if os_ops.path_exists(file_path): + nErrors += 1 + logging.error("Directory {} is not deleted!".format(file_path)) + continue + + if nErrors == 0: + try: + os_ops.rmdir(lock_dir) + except Exception as e: + nErrors += 1 + logging.error("Cannot delete directory [{}]. Error ({}): {}".format( + lock_dir, + type(e).__name__, + str(e) + )) + + logging.info("Test is finished! 
Total error count is {}.".format(nErrors)) + return diff --git a/tests/test_os_ops_remote.py b/tests/test_os_ops_remote.py index 338e49f3..65830218 100755 --- a/tests/test_os_ops_remote.py +++ b/tests/test_os_ops_remote.py @@ -3,7 +3,7 @@ from .helpers.global_data import OsOpsDescrs from .helpers.global_data import OsOperations -from ..testgres import ExecUtilException +from testgres import ExecUtilException import os import pytest diff --git a/tests/test_testgres_common.py b/tests/test_testgres_common.py index e1252de2..cf203a67 100644 --- a/tests/test_testgres_common.py +++ b/tests/test_testgres_common.py @@ -3,29 +3,31 @@ from .helpers.global_data import OsOperations from .helpers.global_data import PortManager -from ..testgres.node import PgVer -from ..testgres.node import PostgresNode -from ..testgres.utils import get_pg_version2 -from ..testgres.utils import file_tail -from ..testgres.utils import get_bin_path2 -from ..testgres import ProcessType -from ..testgres import NodeStatus -from ..testgres import IsolationLevel +from testgres.node import PgVer +from testgres.node import PostgresNode +from testgres.node import PostgresNodeLogReader +from testgres.node import PostgresNodeUtils +from testgres.utils import get_pg_version2 +from testgres.utils import file_tail +from testgres.utils import get_bin_path2 +from testgres import ProcessType +from testgres import NodeStatus +from testgres import IsolationLevel # New name prevents to collect test-functions in TestgresException and fixes # the problem with pytest warning. -from ..testgres import TestgresException as testgres_TestgresException - -from ..testgres import InitNodeException -from ..testgres import StartNodeException -from ..testgres import QueryException -from ..testgres import ExecUtilException -from ..testgres import TimeoutException -from ..testgres import InvalidOperationException -from ..testgres import BackupException -from ..testgres import ProgrammingError -from ..testgres import scoped_config -from ..testgres import First, Any +from testgres import TestgresException as testgres_TestgresException + +from testgres import InitNodeException +from testgres import StartNodeException +from testgres import QueryException +from testgres import ExecUtilException +from testgres import TimeoutException +from testgres import InvalidOperationException +from testgres import BackupException +from testgres import ProgrammingError +from testgres import scoped_config +from testgres import First, Any from contextlib import contextmanager @@ -678,6 +680,89 @@ def test_psql(self, node_svc: PostgresNodeService): r = node.safe_psql('select 1') # raises! 
logging.error("node.safe_psql returns [{}]".format(r)) + def test_psql__another_port(self, node_svc: PostgresNodeService): + assert isinstance(node_svc, PostgresNodeService) + with __class__.helper__get_node(node_svc).init() as node1: + with __class__.helper__get_node(node_svc).init() as node2: + node1.start() + node2.start() + assert node1.port != node2.port + assert node1.host == node2.host + + node1.stop() + + logging.info("test table in node2 is creating ...") + node2.safe_psql( + dbname="postgres", + query="create table test (id integer);" + ) + + logging.info("try to find test table through node1.psql ...") + res = node1.psql( + dbname="postgres", + query="select count(*) from pg_class where relname='test'", + host=node2.host, + port=node2.port, + ) + assert (__class__.helper__rm_carriage_returns(res) == (0, b'1\n', b'')) + + def test_psql__another_bad_host(self, node_svc: PostgresNodeService): + assert isinstance(node_svc, PostgresNodeService) + with __class__.helper__get_node(node_svc).init() as node: + logging.info("try to execute node1.psql ...") + res = node.psql( + dbname="postgres", + query="select count(*) from pg_class where relname='test'", + host="DUMMY_HOST_NAME", + port=node.port, + ) + + res2 = __class__.helper__rm_carriage_returns(res) + + assert res2[0] != 0 + assert b"DUMMY_HOST_NAME" in res[2] + + def test_safe_psql__another_port(self, node_svc: PostgresNodeService): + assert isinstance(node_svc, PostgresNodeService) + with __class__.helper__get_node(node_svc).init() as node1: + with __class__.helper__get_node(node_svc).init() as node2: + node1.start() + node2.start() + assert node1.port != node2.port + assert node1.host == node2.host + + node1.stop() + + logging.info("test table in node2 is creating ...") + node2.safe_psql( + dbname="postgres", + query="create table test (id integer);" + ) + + logging.info("try to find test table through node1.psql ...") + res = node1.safe_psql( + dbname="postgres", + query="select count(*) from pg_class where relname='test'", + host=node2.host, + port=node2.port, + ) + assert (__class__.helper__rm_carriage_returns(res) == b'1\n') + + def test_safe_psql__another_bad_host(self, node_svc: PostgresNodeService): + assert isinstance(node_svc, PostgresNodeService) + with __class__.helper__get_node(node_svc).init() as node: + logging.info("try to execute node1.psql ...") + + with pytest.raises(expected_exception=Exception) as x: + node.safe_psql( + dbname="postgres", + query="select count(*) from pg_class where relname='test'", + host="DUMMY_HOST_NAME", + port=node.port, + ) + + assert "DUMMY_HOST_NAME" in str(x.value) + def test_safe_psql__expect_error(self, node_svc: PostgresNodeService): assert isinstance(node_svc, PostgresNodeService) with __class__.helper__get_node(node_svc).init().start() as node: @@ -800,15 +885,55 @@ def test_backup_wrong_xlog_method(self, node_svc: PostgresNodeService): def test_pg_ctl_wait_option(self, node_svc: PostgresNodeService): assert isinstance(node_svc, PostgresNodeService) - C_MAX_ATTEMPTS = 50 - node = __class__.helper__get_node(node_svc) + C_MAX_ATTEMPT = 5 + + nAttempt = 0 + + while True: + if nAttempt == C_MAX_ATTEMPT: + raise Exception("PostgresSQL did not start.") + + nAttempt += 1 + logging.info("------------------------ NODE #{}".format( + nAttempt + )) + + with __class__.helper__get_node(node_svc, port=12345) as node: + if self.impl__test_pg_ctl_wait_option(node_svc, node): + break + continue + + logging.info("OK. Test is passed. 
Number of attempts is {}".format( + nAttempt + )) + return + + def impl__test_pg_ctl_wait_option( + self, + node_svc: PostgresNodeService, + node: PostgresNode + ) -> None: + assert isinstance(node_svc, PostgresNodeService) + assert isinstance(node, PostgresNode) assert node.status() == NodeStatus.Uninitialized + + C_MAX_ATTEMPTS = 50 + node.init() assert node.status() == NodeStatus.Stopped + + node_log_reader = PostgresNodeLogReader(node, from_beginnig=True) + node.start(wait=False) nAttempt = 0 while True: + if PostgresNodeUtils.delect_port_conflict(node_log_reader): + logging.info("Node port {} conflicted with another PostgreSQL instance.".format( + node.port + )) + return False + if nAttempt == C_MAX_ATTEMPTS: # # [2025-03-11] @@ -867,7 +992,7 @@ def test_pg_ctl_wait_option(self, node_svc: PostgresNodeService): raise Exception("Unexpected node status: {0}.".format(s1)) logging.info("OK. Node is stopped.") - node.cleanup() + return True def test_replicate(self, node_svc: PostgresNodeService): assert isinstance(node_svc, PostgresNodeService) @@ -1395,7 +1520,6 @@ def helper__get_node( return PostgresNode( name, port=port, - conn_params=None, os_ops=node_svc.os_ops, port_manager=port_manager if port is None else None ) diff --git a/tests/test_testgres_local.py b/tests/test_testgres_local.py index 9dbd455b..63e5f37e 100644 --- a/tests/test_testgres_local.py +++ b/tests/test_testgres_local.py @@ -7,21 +7,21 @@ import platform import logging -from .. import testgres +import testgres -from ..testgres import StartNodeException -from ..testgres import ExecUtilException -from ..testgres import NodeApp -from ..testgres import scoped_config -from ..testgres import get_new_node -from ..testgres import get_bin_path -from ..testgres import get_pg_config -from ..testgres import get_pg_version +from testgres import StartNodeException +from testgres import ExecUtilException +from testgres import NodeApp +from testgres import scoped_config +from testgres import get_new_node +from testgres import get_bin_path +from testgres import get_pg_config +from testgres import get_pg_version # NOTE: those are ugly imports -from ..testgres.utils import bound_ports -from ..testgres.utils import PgVer -from ..testgres.node import ProcessProxy +from testgres.utils import bound_ports +from testgres.utils import PgVer +from testgres.node import ProcessProxy def pg_version_ge(version): @@ -158,15 +158,15 @@ def test_child_process_dies(self): def test_upgrade_node(self): old_bin_dir = os.path.dirname(get_bin_path("pg_config")) new_bin_dir = os.path.dirname(get_bin_path("pg_config")) - node_old = get_new_node(prefix='node_old', bin_dir=old_bin_dir) - node_old.init() - node_old.start() - node_old.stop() - node_new = get_new_node(prefix='node_new', bin_dir=new_bin_dir) - node_new.init(cached=False) - res = node_new.upgrade_from(old_node=node_old) - node_new.start() - assert (b'Upgrade Complete' in res) + with get_new_node(prefix='node_old', bin_dir=old_bin_dir) as node_old: + node_old.init() + node_old.start() + node_old.stop() + with get_new_node(prefix='node_new', bin_dir=new_bin_dir) as node_new: + node_new.init(cached=False) + res = node_new.upgrade_from(old_node=node_old) + node_new.start() + assert (b'Upgrade Complete' in res) class tagPortManagerProxy: sm_prev_testgres_reserve_port = None @@ -341,10 +341,10 @@ def test_simple_with_bin_dir(self): bin_dir = node.bin_dir app = NodeApp() - correct_bin_dir = app.make_simple(base_dir=node.base_dir, bin_dir=bin_dir) - correct_bin_dir.slow_start() - 
correct_bin_dir.safe_psql("SELECT 1;") - correct_bin_dir.stop() + with app.make_simple(base_dir=node.base_dir, bin_dir=bin_dir) as correct_bin_dir: + correct_bin_dir.slow_start() + correct_bin_dir.safe_psql("SELECT 1;") + correct_bin_dir.stop() while True: try: diff --git a/tests/test_testgres_remote.py b/tests/test_testgres_remote.py index e38099b7..6a8d068b 100755 --- a/tests/test_testgres_remote.py +++ b/tests/test_testgres_remote.py @@ -7,16 +7,16 @@ from .helpers.global_data import PostgresNodeService from .helpers.global_data import PostgresNodeServices -from .. import testgres +import testgres -from ..testgres.exceptions import InitNodeException -from ..testgres.exceptions import ExecUtilException +from testgres.exceptions import InitNodeException +from testgres.exceptions import ExecUtilException -from ..testgres.config import scoped_config -from ..testgres.config import testgres_config +from testgres.config import scoped_config +from testgres.config import testgres_config -from ..testgres import get_bin_path -from ..testgres import get_pg_config +from testgres import get_bin_path +from testgres import get_pg_config # NOTE: those are ugly imports @@ -173,7 +173,6 @@ def helper__get_node(name=None): return testgres.PostgresNode( name, - conn_params=None, os_ops=svc.os_ops, port_manager=svc.port_manager) diff --git a/tests/test_utils.py b/tests/test_utils.py index c05bd2fe..39e9dda0 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,9 +2,9 @@ from .helpers.global_data import OsOpsDescrs from .helpers.global_data import OsOperations -from ..testgres.utils import parse_pg_version -from ..testgres.utils import get_pg_config2 -from ..testgres import scoped_config +from testgres.utils import parse_pg_version +from testgres.utils import get_pg_config2 +from testgres import scoped_config import pytest import typing